Mirror of https://github.com/ankitects/anki.git, synced 2025-09-18 14:02:21 -04:00.

Compare commits: 489 commits, from 3890e12c9e to c8e4c366c1.
635 changed files with 21391 additions and 16339 deletions
@@ -5,7 +5,11 @@ DESCRIPTORS_BIN = { value = "out/rslib/proto/descriptors.bin", relative = true }
# build script will append .exe if necessary
PROTOC = { value = "out/extracted/protoc/bin/protoc", relative = true }
PYO3_NO_PYTHON = "1"
MACOSX_DEPLOYMENT_TARGET = "10.13.4"
MACOSX_DEPLOYMENT_TARGET = "11"
PYTHONDONTWRITEBYTECODE = "1" # prevent junk files on Windows

[term]
color = "always"

[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
.cursor/rules/building.md (new file, 2 lines)

@@ -0,0 +1,2 @@
- To build and check the project, use ./check in the root folder (or check.bat on Windows)
- This will format files, then run lints and unit tests.
.cursor/rules/i18n.md (new file, 7 lines)

@@ -0,0 +1,7 @@
- We use the fluent system+code generation for translation.
- New strings should be added to rslib/core/. Ask for the appropriate file if you're not sure.
- Assuming a string addons-you-have-count has been added to addons.ftl, that string is accessible in our different languages as follows:
  - Python: from aqt.utils import tr; msg = tr.addons_you_have_count(count=3)
  - TypeScript: import * as tr from "@generated/ftl"; tr.addonsYouHaveCount({count: 3})
  - Rust: collection.tr.addons_you_have_count(3)
- In Qt .ui files, strings that are marked as translatable will automatically use the registered ftl strings. So a QLabel with a title 'addons_you_have_count' that is marked as translatable will automatically use the translation defined in our addons.ftl file.
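To make the Rust access pattern above concrete, here is a minimal sketch; the crate path and surrounding function are illustrative assumptions, while the `tr` method itself is generated from the ftl key by the i18n tooling:

```rust
use anki::collection::Collection; // assumed crate path, for illustration only

// Sketch: the i18n code generator turns the ftl key
// "addons-you-have-count" into a snake_case method on the `tr` accessor.
fn show_addon_count(col: &Collection) {
    let msg = col.tr.addons_you_have_count(3);
    println!("{msg}");
}
```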
@@ -5,9 +5,6 @@
db-path = "~/.cargo/advisory-db"
db-urls = ["https://github.com/rustsec/advisory-db"]
ignore = [
    # pyoxidizer is stuck on an old ring version
    "RUSTSEC-2025-0009",
    "RUSTSEC-2025-0010",
    # burn depends on an unmaintained package 'paste'
    "RUSTSEC-2024-0436",
]

@@ -17,12 +14,11 @@ allow = [
    "MIT",
    "Apache-2.0",
    "Apache-2.0 WITH LLVM-exception",
    "CDLA-Permissive-2.0",
    "ISC",
    "MPL-2.0",
    "Unicode-DFS-2016",
    "BSD-2-Clause",
    "BSD-3-Clause",
    "OpenSSL",
    "CC0-1.0",
    "Unlicense",
    "Zlib",
@@ -20,7 +20,6 @@
    "ftl/usage",
    "licenses.json",
    ".dmypy.json",
    "qt/bundle/PyOxidizer",
    "target",
    ".mypy_cache",
    "extra",
.gitignore (vendored, 2 changed lines)

@@ -18,3 +18,5 @@ node_modules
yarn-error.log
ts/.svelte-kit
.yarn
.claude/settings.local.json
.claude/user.md
.gitmodules (vendored, 6 changed lines)

@@ -6,9 +6,3 @@
path = ftl/qt-repo
url = https://github.com/ankitects/anki-desktop-ftl.git
shallow = true
[submodule "qt/bundle/PyOxidizer"]
path = qt/bundle/PyOxidizer
url = https://github.com/ankitects/PyOxidizer.git
shallow = true
update = none
@@ -1,5 +0,0 @@
[settings]
py_version=39
known_first_party=anki,aqt,tests
profile=black
extend_skip=qt/bundle
@@ -18,7 +18,7 @@ mypy_path =
    ftl,
    pylib/tools,
    python
exclude = (qt/bundle/PyOxidizer|pylib/anki/_vendor)
exclude = (pylib/anki/_vendor)

[mypy-anki.*]
disallow_untyped_defs = True
@@ -165,3 +165,5 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-pip_system_certs.*]
ignore_missing_imports = True
[mypy-anki_audio]
ignore_missing_imports = True
.pylintrc (deleted, 48 lines)

@@ -1,48 +0,0 @@
[MASTER]
ignore-patterns=.*_pb2.*
persistent = no
extension-pkg-whitelist=orjson,PyQt6
init-hook="import sys; sys.path.extend(['pylib/anki/_vendor', 'out/qt'])"

[REPORTS]
output-format=colorized

[MESSAGES CONTROL]
disable=
  R,
  line-too-long,
  too-many-lines,
  missing-function-docstring,
  missing-module-docstring,
  missing-class-docstring,
  import-outside-toplevel,
  wrong-import-position,
  wrong-import-order,
  fixme,
  unused-wildcard-import,
  attribute-defined-outside-init,
  redefined-builtin,
  wildcard-import,
  broad-except,
  bare-except,
  unused-argument,
  unused-variable,
  redefined-outer-name,
  global-statement,
  protected-access,
  arguments-differ,
  arguments-renamed,
  consider-using-f-string,
  invalid-name,
  broad-exception-raised

[BASIC]
good-names =
  id,
  tr,
  db,
  ok,
  ip,

[IMPORTS]
ignored-modules = anki.*_pb2, anki.sync_pb2, win32file,pywintypes,socket,win32pipe,pyaudio,anki.scheduler_pb2,anki.notetypes_pb2
.python-version (new file, 1 line)

@@ -0,0 +1 @@
3.13.5
.ruff.toml (93 changed lines)

@@ -1,2 +1,91 @@
target-version = "py39"
extend-exclude = ["qt/bundle"]
lint.select = [
    "E",   # pycodestyle errors
    "F",   # Pyflakes errors
    "PL",  # Pylint rules
    "I",   # Isort rules
    "ARG",
    # "UP",  # pyupgrade
    # "B",   # flake8-bugbear
    # "SIM", # flake8-simplify
]

extend-exclude = ["*_pb2.py", "*_pb2.pyi"]

lint.ignore = [
    # Docstring rules (missing-*-docstring in pylint)
    "D100", # Missing docstring in public module
    "D101", # Missing docstring in public class
    "D103", # Missing docstring in public function

    # Import rules (wrong-import-* in pylint)
    "E402", # Module level import not at top of file
    "E501", # Line too long

    # pycodestyle rules
    "E741", # ambiguous-variable-name

    # Comment rules (fixme in pylint)
    "FIX002", # Line contains TODO

    # Pyflakes rules
    "F402", # import-shadowed-by-loop-var
    "F403", # undefined-local-with-import-star
    "F405", # undefined-local-with-import-star-usage

    # Naming rules (invalid-name in pylint)
    "N801", # Class name should use CapWords convention
    "N802", # Function name should be lowercase
    "N803", # Argument name should be lowercase
    "N806", # Variable in function should be lowercase
    "N811", # Constant imported as non-constant
    "N812", # Lowercase imported as non-lowercase
    "N813", # Camelcase imported as lowercase
    "N814", # Camelcase imported as constant
    "N815", # Variable in class scope should not be mixedCase
    "N816", # Variable in global scope should not be mixedCase
    "N817", # CamelCase imported as acronym
    "N818", # Error suffix in exception names

    # Pylint rules
    "PLW0603", # global-statement
    "PLW2901", # redefined-loop-name
    "PLC0415", # import-outside-top-level
    "PLR2004", # magic-value-comparison

    # Exception handling (broad-except, bare-except in pylint)
    "BLE001", # Do not catch blind exception

    # Argument rules (unused-argument in pylint)
    "ARG001", # Unused function argument
    "ARG002", # Unused method argument
    "ARG005", # Unused lambda argument

    # Access rules (protected-access in pylint)
    "SLF001", # Private member accessed

    # String formatting (consider-using-f-string in pylint)
    "UP032", # Use f-string instead of format call

    # Exception rules (broad-exception-raised in pylint)
    "TRY301", # Abstract raise to an inner function

    # Builtin shadowing (redefined-builtin in pylint)
    "A001", # Variable shadows a Python builtin
    "A002", # Argument shadows a Python builtin
    "A003", # Class attribute shadows a Python builtin
]

[lint.per-file-ignores]
"**/anki/*_pb2.py" = ["ALL"]

[lint.pep8-naming]
ignore-names = ["id", "tr", "db", "ok", "ip"]

[lint.pylint]
max-args = 12
max-returns = 10
max-branches = 35
max-statements = 125

[lint.isort]
known-first-party = ["anki", "aqt", "tests"]
.version (2 changed lines)

@@ -1 +1 @@
25.02.2
25.09.2
@@ -2,7 +2,7 @@
"recommendations": [
    "dprint.dprint",
    "ms-python.python",
    "ms-python.black-formatter",
    "charliermarsh.ruff",
    "rust-lang.rust-analyzer",
    "svelte.svelte-vscode",
    "zxh404.vscode-proto3",
@@ -2,7 +2,7 @@
"editor.formatOnSave": true,
"[python]": {
    "editor.codeActionsOnSave": {
        "source.organizeImports": true
        "source.organizeImports": "explicit"
    }
},
"files.watcherExclude": {
@@ -18,12 +18,12 @@
    "out/qt",
    "qt"
],
"python.formatting.provider": "black",
"python.formatting.provider": "charliermarsh.ruff",
"python.linting.mypyEnabled": false,
"python.analysis.diagnosticSeverityOverrides": {
    "reportMissingModuleSource": "none"
},
"rust-analyzer.checkOnSave.allTargets": false,
"rust-analyzer.check.allTargets": false,
"rust-analyzer.files.excludeDirs": [".bazel", "node_modules"],
"rust-analyzer.procMacro.enable": true,
// this formats 'use' blocks in a nicer way, but requires you to run
@@ -31,11 +31,13 @@
"rust-analyzer.rustfmt.extraArgs": ["+nightly"],
"search.exclude": {
    "**/node_modules": true,
    ".bazel/**": true,
    "qt/bundle/PyOxidizer": true
    ".bazel/**": true
},
"rust-analyzer.cargo.buildScripts.enable": true,
"python.analysis.typeCheckingMode": "off",
"python.analysis.exclude": [
    "out/launcher/**"
],
"terminal.integrated.env.windows": {
    "PATH": "c:\\msys64\\usr\\bin;${env:Path}"
}
@@ -1 +1,2 @@
nodeLinker: node-modules
enableScripts: false
CLAUDE.md (new file, 86 lines)

@@ -0,0 +1,86 @@
# Claude Code Configuration

## Project Overview

Anki is a spaced repetition flashcard program with a multi-layered architecture. Main components:

- Web frontend: Svelte/TypeScript in ts/
- PyQt GUI, which embeds the web components, in aqt/
- Python library which wraps our Rust layer (pylib/, with Rust module in pylib/rsbridge)
- Core Rust layer in rslib/
- Protobuf definitions in proto/ that are used by the different layers to talk to each other.

## Building/checking

./check (check.bat) will format the code and run the main build & checks.
Please do this as a final step before marking a task as completed.

## Quick iteration

During development, you can build/check subsections of our code:

- Rust: 'cargo check'
- Python: './tools/dmypy', and if wheel-related, './ninja wheels'
- TypeScript/Svelte: './ninja check:svelte'

Be mindful that some changes (such as modifications to .proto files) may need a full build with './check' first.

## Build tooling

'./check' and './ninja' invoke our build system, which is implemented in build/. It takes care of downloading required deps and invoking our build steps.

## Translations

ftl/ contains our Fluent translation files. We have scripts in rslib/i18n to auto-generate an API for Rust, TypeScript and Python so that our code can access the translations in a type-safe manner. Changes should be made to ftl/core or ftl/qt. Except for features specific to our Qt interface, prefer the core module. When adding new strings, confirm the appropriate ftl file first, and try to match the existing style.

## Protobuf and IPC

Our build scripts use the .proto files to define our Rust library's non-Rust API. pylib/rsbridge exposes that API, and _backend.py exposes snake_case methods for each protobuf RPC that call into the API. Similar tooling creates a @generated/backend TypeScript module for communicating with the Rust backend (which happens over POST requests).

## Fixing errors

When dealing with build errors or failing tests, invoke 'check' or one of the quick iteration commands regularly. This helps verify your changes are correct. To locate other instances of a problem, run the check again - don't attempt to grep the codebase.

## Ignores

The files in out/ are auto-generated. Mostly you should ignore that folder, though you may sometimes find it useful to view out/{pylib/anki,qt/_aqt,ts/lib/generated} when dealing with cross-language communication or our other generated source code.

## Launcher/installer

The code for our launcher is in qt/launcher, with separate code for each platform.

## Rust dependencies

Prefer adding to the root workspace, and using dep.workspace = true in the individual Rust project.

## Rust utilities

rslib/{process,io} contain some helpers for file and process operations, which provide better error messages/context and some ergonomics. Use them when possible.

## Rust error handling

In rslib, use error/mod.rs's AnkiError/Result and snafu. In our other Rust modules, prefer anyhow + additional context where appropriate. Unwrapping in build scripts/tests is fine.

## Individual preferences

See @.claude/user.md
CONTRIBUTORS (38 changed lines)

@@ -49,6 +49,7 @@ Sander Santema <github.com/sandersantema/>
Thomas Brownback <https://github.com/brownbat/>
Andrew Gaul <andrew@gaul.org>
kenden
Emil Hamrin <github.com/e-hamrin>
Nickolay Yudin <kelciour@gmail.com>
neitrinoweb <github.com/neitrinoweb/>
Andreas Reis <github.com/nwwt>

@@ -63,6 +64,7 @@ Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>
Akshara Balachandra <akshara.bala.18@gmail.com>
lukkea <github.com/lukkea/>
David Allison <davidallisongithub@gmail.com>
David Allison <62114487+david-allison@users.noreply.github.com>
Tsung-Han Yu <johan456789@gmail.com>
Piotr Kubowicz <piotr.kubowicz@gmail.com>
RumovZ <gp5glkw78@relay.firefox.com>

@@ -98,7 +100,7 @@ gnnoh <gerongfenh@gmail.com>
Sachin Govind <sachin.govind.too@gmail.com>
Bruce Harris <github.com/bruceharris>
Patric Cunha <patricc@agap2.pt>
Brayan Oliveira <github.com/BrayanDSO>
Brayan Oliveira <69634269+BrayanDSO@users.noreply.github.com>
Luka Warren <github.com/lukawarren>
wisherhxl <wisherhxl@gmail.com>
dobefore <1432338032@qq.com>

@@ -148,7 +150,7 @@ user1823 <92206575+user1823@users.noreply.github.com>
Gustaf Carefall <https://github.com/Gustaf-C>
virinci <github.com/virinci>
snowtimeglass <snowtimeglass@gmail.com>
brishtibheja <sorata225yume@gmail.com>
brishtibheja <136738526+brishtibheja@users.noreply.github.com>
Ben Olson <github.com/grepgrok>
Akash Reddy <akashreddy2003@gmail.com>
Lucio Sauer <watermanpaint@posteo.net>

@@ -201,18 +203,46 @@ Dongjin Ouyang <1113117424@qq.com>
Sawan Sunar <sawansunar24072002@gmail.com>
hideo aoyama <https://github.com/boukendesho>
Ross Brown <rbrownwsws@googlemail.com>
🦙 <github.com/iamllama>
🦙 <gh@siid.sh>
Lukas Sommer <sommerluk@gmail.com>
Luca Auer <lolle2000.la@gmail.com>
Lukas Sommer <sommerluk@users.noreply.github.com>
Niclas Heinz <nheinz@hpost.net>
Omar Kohl <omarkohl@posteo.net>
David Elizalde <david.elizalde.r.a@gmail.com>
beyondcompute <beyondcompute@gmail.com>
Yuki <https://github.com/YukiNagat0>
wackbyte <wackbyte@protonmail.com>
GithubAnon0000 <GithubAnon0000@users.noreply.github.com>
Mike Hardy <github@mikehardy.net>
Danika_Dakika <https://github.com/Danika-Dakika>
Mumtaz Hajjo Alrifai <mumtazrifai@protonmail.com>
Thomas Graves <fate@hey.com>
Jakub Fidler <jakub.fidler@protonmail.com>
Valerie Enfys <val@unidentified.systems>
Julien Chol <https://github.com/chel-ou>
ikkz <ylei.mk@gmail.com>
derivativeoflog7 <https://github.com/derivativeoflog7>
rreemmii-dev <https://github.com/rreemmii-dev>
babofitos <https://github.com/babofitos>
Jonathan Schoreels <https://github.com/JSchoreels>
JL710
Matt Brubeck <mbrubeck@limpet.net>
Yaoliang Chen <yaoliang.ch@gmail.com>
KolbyML <https://github.com/KolbyML>
Adnane Taghi <dev@soleuniverse.me>
Spiritual Father <https://github.com/spiritualfather>
Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
Sunong2008 <https://github.com/Sunrongguo2008>
Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <32392044+leedoughty@users.noreply.github.com>
memchr <memchr@proton.me>
Max Romanowski <maxr777@proton.me>
Aldlss <ayaldlss@gmail.com>

********************
Cargo.lock (generated, 4120 changed lines): file diff suppressed because it is too large.
Cargo.toml (139 changed lines)

@@ -12,8 +12,7 @@ members = [
    "build/runner",
    "ftl",
    "pylib/rsbridge",
    "qt/bundle/mac",
    "qt/bundle/win",
    "qt/launcher",
    "rslib",
    "rslib/i18n",
    "rslib/io",

@@ -23,7 +22,6 @@ members = [
    "rslib/sync",
    "tools/minilints",
]
exclude = ["qt/bundle"]
resolver = "2"

[workspace.dependencies.percent-encoding-iri]

@@ -35,9 +33,8 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"

[workspace.dependencies.fsrs]
version = "=2.0.3"
version = "5.1.0"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "58ca25ed2bc4bb1dc376208bbcaed7f5a501b941"
# path = "../open-spaced-repetition/fsrs-rs"

[workspace.dependencies]

@@ -54,99 +51,101 @@ ninja_gen = { "path" = "build/ninja_gen" }
unicase = "=2.6.0" # any changes could invalidate sqlite indexes

# normal
ammonia = "4.0.0"
anyhow = "1.0.90"
apple-bundles = "0.17.0"
async-compression = { version = "0.4.17", features = ["zstd", "tokio"] }
ammonia = "4.1.0"
anyhow = "1.0.98"
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
async-stream = "0.3.6"
async-trait = "0.1.83"
axum = { version = "0.7", features = ["multipart", "macros"] }
axum-client-ip = "0.6"
axum-extra = { version = "0.9.4", features = ["typed-header"] }
blake3 = "1.5.4"
bytes = "1.7.2"
camino = "1.1.9"
chrono = { version = "0.4.38", default-features = false, features = ["std", "clock"] }
clap = { version = "4.5.20", features = ["derive"] }
coarsetime = "0.1.34"
convert_case = "0.6.0"
criterion = { version = "0.5.1" }
csv = "1.3.0"
data-encoding = "2.6.0"
async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["multipart", "macros"] }
axum-client-ip = "1.1.3"
axum-extra = { version = "0.10.1", features = ["typed-header"] }
bitflags = "2.9.1"
blake3 = "1.8.2"
bytes = "1.10.1"
camino = "1.1.10"
chrono = { version = "0.4.41", default-features = false, features = ["std", "clock"] }
clap = { version = "4.5.40", features = ["derive"] }
coarsetime = "0.1.36"
convert_case = "0.8.0"
criterion = { version = "0.6.0" }
csv = "1.3.1"
data-encoding = "2.9.0"
difflib = "0.4.0"
dirs = "5.0.1"
dirs = "6.0.0"
dunce = "1.0.5"
embed-resource = "3.0.4"
envy = "0.4.2"
flate2 = "1.0.34"
fluent = "0.16.1"
fluent-bundle = "0.15.3"
fluent-syntax = "0.11.1"
flate2 = "1.1.2"
fluent = "0.17.0"
fluent-bundle = "0.16.0"
fluent-syntax = "0.12.0"
fnv = "1.0.7"
futures = "0.3.31"
glob = "0.3.1"
globset = "0.4.15"
globset = "0.4.16"
hex = "0.4.3"
htmlescape = "0.3.1"
hyper = "1"
id_tree = "1.8.0"
inflections = "1.1.1"
intl-memoizer = "0.5.2"
itertools = "0.13.0"
intl-memoizer = "0.5.3"
itertools = "0.14.0"
junction = "1.2.0"
lazy_static = "1.5.0"
libc = "0.2"
libc-stdhandle = "0.1"
maplit = "1.0.2"
nom = "7.1.3"
nom = "8.0.0"
num-format = "0.4.4"
num_cpus = "1.16.0"
num_cpus = "1.17.0"
num_enum = "0.7.3"
once_cell = "1.20.2"
once_cell = "1.21.3"
pbkdf2 = { version = "0.12", features = ["simple"] }
phf = { version = "0.11.2", features = ["macros"] }
pin-project = "1.1.6"
plist = "1.7.0"
prettyplease = "0.2.24"
phf = { version = "0.11.3", features = ["macros"] }
pin-project = "1.1.10"
prettyplease = "0.2.34"
prost = "0.13"
prost-build = "0.13"
prost-reflect = "0.14"
prost-reflect = "0.14.7"
prost-types = "0.13"
pulldown-cmark = "0.9.6"
pyo3 = { version = "0.24", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.8.5"
regex = "1.11.0"
reqwest = { version = "0.12.8", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.30.0", features = ["trace", "functions", "collation", "bundled"] }
pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
rustls-pemfile = "2.2.0"
scopeguard = "1.2.0"
serde = { version = "1.0.210", features = ["derive"] }
serde-aux = "4.5.0"
serde_json = "1.0.132"
serde_repr = "0.1.19"
serde_tuple = "0.5.0"
serde = { version = "1.0.219", features = ["derive"] }
serde-aux = "4.7.0"
serde_json = "1.0.140"
serde_repr = "0.1.20"
serde_tuple = "1.1.0"
sha1 = "0.10.6"
sha2 = { version = "0.10.8" }
simple-file-manifest = "0.11.0"
snafu = { version = "0.8.5", features = ["rust_1_61"] }
strum = { version = "0.26.3", features = ["derive"] }
syn = { version = "2.0.82", features = ["parsing", "printing"] }
tar = "0.4.42"
tempfile = "3.13.0"
sha2 = { version = "0.10.9" }
snafu = { version = "0.8.6", features = ["rust_1_61"] }
strum = { version = "0.27.1", features = ["derive"] }
syn = { version = "2.0.103", features = ["parsing", "printing"] }
tar = "0.4.44"
tempfile = "3.20.0"
termcolor = "1.4.1"
tokio = { version = "1.40", features = ["fs", "rt-multi-thread", "macros", "signal"] }
tokio-util = { version = "0.7.12", features = ["io"] }
tower-http = { version = "0.5", features = ["trace"] }
tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
tokio = { version = "1.45", features = ["fs", "rt-multi-thread", "macros", "signal"] }
tokio-util = { version = "0.7.15", features = ["io"] }
tower-http = { version = "0.6.6", features = ["trace"] }
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] }
tugger-windows-codesign = "0.10.0"
unic-langid = { version = "0.9.5", features = ["macros"] }
tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
unic-langid = { version = "0.9.6", features = ["macros"] }
unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24"
walkdir = "2.5.0"
which = "5.0.0"
wiremock = "0.6.2"
which = "8.0.0"
widestring = "1.1.0"
winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] }
wiremock = "0.6.3"
xz2 = "0.1.7"
zip = { version = "0.6.6", default-features = false, features = ["deflate", "time"] }
zstd = { version = "0.13.2", features = ["zstdmt"] }
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }
zstd = { version = "0.13.3", features = ["zstdmt"] }

# Apply mild optimizations to our dependencies in dev mode, which among other things
# improves sha2 performance by about 21x. Opt 1 chosen due to
LICENSE (2 changed lines)

@@ -6,8 +6,6 @@ The following included source code items use a license other than AGPL3:

In the pylib folder:

* The SuperMemo importer: GPL3 and 0BSD.
* The Pauker importer: BSD-3.
* statsbg.py: CC BY 4.0.

In the qt folder:
@@ -1,4 +1,4 @@
# Anki
# Anki®

[](https://buildkite.com/ankitects/anki-ci)
@@ -27,7 +27,6 @@ pub fn build_and_check_aqt(build: &mut Build) -> Result<()> {
    build_forms(build)?;
    build_generated_sources(build)?;
    build_data_folder(build)?;
    build_macos_helper(build)?;
    build_wheel(build)?;
    check_python(build)?;
    Ok(())

@@ -39,7 +38,6 @@ fn build_forms(build: &mut Build) -> Result<()> {
    let mut py_files = vec![];
    for path in ui_files.resolve() {
        let outpath = outdir.join(path.file_name().unwrap()).into_string();
        py_files.push(outpath.replace(".ui", "_qt5.py"));
        py_files.push(outpath.replace(".ui", "_qt6.py"));
    }
    build.add_action(
@@ -337,47 +335,25 @@ impl BuildAction for BuildThemedIcon<'_> {
    }
}

fn build_macos_helper(build: &mut Build) -> Result<()> {
    if cfg!(target_os = "macos") {
        build.add_action(
            "qt:aqt:data:lib:libankihelper",
            RunCommand {
                command: ":pyenv:bin",
                args: "$script $out $in",
                inputs: hashmap! {
                    "script" => inputs!["qt/mac/helper_build.py"],
                    "in" => inputs![glob!["qt/mac/*.swift"]],
                    "" => inputs!["out/env"],
                },
                outputs: hashmap! {
                    "out" => vec!["qt/_aqt/data/lib/libankihelper.dylib"],
                },
            },
        )?;
    }
    Ok(())
}

fn build_wheel(build: &mut Build) -> Result<()> {
    build.add_action(
        "wheels:aqt",
        BuildWheel {
            name: "aqt",
            version: anki_version(),
            src_folder: "qt/aqt",
            gen_folder: "$builddir/qt/_aqt",
            platform: None,
            deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "python/requirements.aqt.in"],
            deps: inputs![
                ":qt:aqt",
                glob!("qt/aqt/**"),
                "qt/pyproject.toml",
                "qt/hatch_build.py"
            ],
        },
    )
}

fn check_python(build: &mut Build) -> Result<()> {
    python_format(
        build,
        "qt",
        inputs![glob!("qt/**/*.py", "qt/bundle/PyOxidizer/**")],
    )?;
    python_format(build, "qt", inputs![glob!("qt/**/*.py")])?;

    build.add_action(
        "check:pytest:aqt",
@ -1,442 +0,0 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::env;
|
||||
|
||||
use anyhow::Result;
|
||||
use ninja_gen::action::BuildAction;
|
||||
use ninja_gen::archives::download_and_extract;
|
||||
use ninja_gen::archives::empty_manifest;
|
||||
use ninja_gen::archives::with_exe;
|
||||
use ninja_gen::archives::OnlineArchive;
|
||||
use ninja_gen::archives::Platform;
|
||||
use ninja_gen::build::BuildProfile;
|
||||
use ninja_gen::cargo::CargoBuild;
|
||||
use ninja_gen::cargo::RustOutput;
|
||||
use ninja_gen::git::SyncSubmodule;
|
||||
use ninja_gen::glob;
|
||||
use ninja_gen::input::BuildInput;
|
||||
use ninja_gen::inputs;
|
||||
use ninja_gen::python::PythonEnvironment;
|
||||
use ninja_gen::Build;
|
||||
use ninja_gen::Utf8Path;
|
||||
|
||||
use crate::anki_version;
|
||||
use crate::platform::overriden_python_target_platform;
|
||||
use crate::platform::overriden_rust_target_triple;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum DistKind {
|
||||
Standard,
|
||||
}
|
||||
|
||||
impl DistKind {
|
||||
fn folder_name(&self) -> &'static str {
|
||||
match self {
|
||||
DistKind::Standard => "std",
|
||||
}
|
||||
}
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
match self {
|
||||
DistKind::Standard => "standard",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_bundle(build: &mut Build) -> Result<()> {
|
||||
// install into venv
|
||||
setup_primary_venv(build)?;
|
||||
install_anki_wheels(build)?;
|
||||
|
||||
// bundle venv into output binary + extra_files
|
||||
build_pyoxidizer(build)?;
|
||||
build_artifacts(build)?;
|
||||
build_binary(build)?;
|
||||
|
||||
// package up outputs with Qt/other deps
|
||||
download_dist_folder_deps(build)?;
|
||||
build_dist_folder(build, DistKind::Standard)?;
|
||||
|
||||
build_packages(build)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn targetting_macos_arm() -> bool {
|
||||
cfg!(all(target_os = "macos", target_arch = "aarch64"))
|
||||
&& overriden_python_target_platform().is_none()
|
||||
}
|
||||
|
||||
const WIN_AUDIO: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/audio-win-amd64.tar.gz",
|
||||
sha256: "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f",
|
||||
};
|
||||
|
||||
const MAC_ARM_AUDIO: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-arm64.tar.gz",
|
||||
sha256: "f6c4af9be59ae1c82a16f5c6307f13cbf31b49ad7b69ce1cb6e0e7b403cfdb8f",
|
||||
};
|
||||
|
||||
const MAC_AMD_AUDIO: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-05-26/audio-mac-amd64.tar.gz",
|
||||
sha256: "ecbb3c878805cdd58b1a0b8e3fd8c753b8ce3ad36c8b5904a79111f9db29ff42",
|
||||
};
|
||||
|
||||
const MAC_ARM_QT6: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-arm64.tar.zst",
|
||||
sha256: "9b2ade4ae9b80506689062845e83e8c60f7fa9843545bf7bb2d11d3e2f105878",
|
||||
};
|
||||
|
||||
const MAC_AMD_QT6: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2024-02-29/pyqt6.6-mac-amd64.tar.zst",
|
||||
sha256: "dbd0871e4da22820d1fa9ab29220d631467d1178038dcab4b15169ad7f499b1b",
|
||||
};
|
||||
|
||||
const LINUX_QT_PLUGINS: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-02/qt-plugins-linux-amd64.tar.gz",
|
||||
sha256: "66bb568aca7242bc55ad419bf5c96755ca15d2a743e1c3a09cba8b83230b138b",
|
||||
};
|
||||
|
||||
const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
|
||||
url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
|
||||
sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
|
||||
};
|
||||
|
||||
fn download_dist_folder_deps(build: &mut Build) -> Result<()> {
|
||||
let mut bundle_deps = vec![":wheels"];
|
||||
if cfg!(windows) {
|
||||
download_and_extract(build, "win_amd64_audio", WIN_AUDIO, empty_manifest())?;
|
||||
download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;
|
||||
bundle_deps.extend([":extract:win_amd64_audio", ":extract:nsis_plugins"]);
|
||||
} else if cfg!(target_os = "macos") {
|
||||
if targetting_macos_arm() {
|
||||
download_and_extract(build, "mac_arm_audio", MAC_ARM_AUDIO, empty_manifest())?;
|
||||
download_and_extract(build, "mac_arm_qt6", MAC_ARM_QT6, empty_manifest())?;
|
||||
bundle_deps.extend([":extract:mac_arm_audio", ":extract:mac_arm_qt6"]);
|
||||
} else {
|
||||
download_and_extract(build, "mac_amd_audio", MAC_AMD_AUDIO, empty_manifest())?;
|
||||
download_and_extract(build, "mac_amd_qt6", MAC_AMD_QT6, empty_manifest())?;
|
||||
bundle_deps.extend([":extract:mac_amd_audio", ":extract:mac_amd_qt6"]);
|
||||
}
|
||||
} else {
|
||||
download_and_extract(
|
||||
build,
|
||||
"linux_qt_plugins",
|
||||
LINUX_QT_PLUGINS,
|
||||
empty_manifest(),
|
||||
)?;
|
||||
bundle_deps.extend([":extract:linux_qt_plugins"]);
|
||||
}
|
||||
build.add_dependency(
|
||||
"bundle:deps",
|
||||
inputs![bundle_deps
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()],
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct Venv {
|
||||
label: &'static str,
|
||||
path_without_builddir: &'static str,
|
||||
}
|
||||
|
||||
impl Venv {
|
||||
fn label_as_target(&self, suffix: &str) -> String {
|
||||
format!(":{}{suffix}", self.label)
|
||||
}
|
||||
}
|
||||
|
||||
const PRIMARY_VENV: Venv = Venv {
|
||||
label: "bundle:pyenv",
|
||||
path_without_builddir: "bundle/pyenv",
|
||||
};
|
||||
|
||||
fn setup_primary_venv(build: &mut Build) -> Result<()> {
|
||||
let mut qt6_reqs = inputs![
|
||||
"python/requirements.bundle.txt",
|
||||
"python/requirements.qt6_6.txt",
|
||||
];
|
||||
if cfg!(windows) {
|
||||
qt6_reqs = inputs![qt6_reqs, "python/requirements.win.txt"];
|
||||
}
|
||||
build.add_action(
|
||||
PRIMARY_VENV.label,
|
||||
PythonEnvironment {
|
||||
folder: PRIMARY_VENV.path_without_builddir,
|
||||
base_requirements_txt: "python/requirements.base.txt".into(),
|
||||
requirements_txt: qt6_reqs,
|
||||
extra_binary_exports: &[],
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct InstallAnkiWheels {
|
||||
venv: Venv,
|
||||
}
|
||||
|
||||
impl BuildAction for InstallAnkiWheels {
|
||||
fn command(&self) -> &str {
|
||||
"$pip install --force-reinstall --no-deps $in"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("pip", inputs![self.venv.label_as_target(":pip")]);
|
||||
build.add_inputs("in", inputs![":wheels"]);
|
||||
build.add_output_stamp("bundle/wheels.stamp");
|
||||
}
|
||||
}
|
||||
|
||||
fn install_anki_wheels(build: &mut Build) -> Result<()> {
|
||||
build.add_action(
|
||||
"bundle:add_wheels:qt6",
|
||||
InstallAnkiWheels { venv: PRIMARY_VENV },
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_pyoxidizer(build: &mut Build) -> Result<()> {
|
||||
let offline_build = env::var("OFFLINE_BUILD").is_ok();
|
||||
|
||||
build.add_action(
|
||||
"bundle:pyoxidizer:repo",
|
||||
SyncSubmodule {
|
||||
path: "qt/bundle/PyOxidizer",
|
||||
offline_build,
|
||||
},
|
||||
)?;
|
||||
let target =
|
||||
overriden_rust_target_triple().unwrap_or_else(|| Platform::current().as_rust_triple());
|
||||
let output_bin = format!("bundle/rust/{target}/release/pyoxidizer",);
|
||||
build.add_action(
|
||||
"bundle:pyoxidizer:bin",
|
||||
CargoBuild {
|
||||
inputs: inputs![
|
||||
":bundle:pyoxidizer:repo",
|
||||
"out/env",
|
||||
glob!["qt/bundle/PyOxidizer/**"]
|
||||
],
|
||||
// can't use ::Binary() here, as we're in a separate workspace
|
||||
outputs: &[RustOutput::Data("bin", &with_exe(&output_bin))],
|
||||
target: Some(target),
|
||||
extra_args: &format!(
|
||||
"--manifest-path={} --target-dir={} -p pyoxidizer",
|
||||
"qt/bundle/PyOxidizer/Cargo.toml", "$builddir/bundle/rust"
|
||||
),
|
||||
release_override: Some(BuildProfile::Release),
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct BuildArtifacts {}
|
||||
|
||||
impl BuildAction for BuildArtifacts {
|
||||
fn command(&self) -> &str {
|
||||
"$runner build-artifacts $bundle_root $pyoxidizer_bin"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("pyoxidizer_bin", inputs![":bundle:pyoxidizer:bin"]);
|
||||
build.add_inputs("", inputs![PRIMARY_VENV.label_as_target("")]);
|
||||
build.add_inputs("", inputs![":bundle:add_wheels:qt6", glob!["qt/bundle/**"]]);
|
||||
build.add_variable("bundle_root", "$builddir/bundle");
|
||||
build.add_outputs_ext(
|
||||
"pyo3_config",
|
||||
vec!["bundle/artifacts/pyo3-build-config-file.txt"],
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
fn check_output_timestamps(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn build_artifacts(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:artifacts", BuildArtifacts {})
|
||||
}
|
||||
|
||||
struct BuildBundle {}
|
||||
|
||||
impl BuildAction for BuildBundle {
|
||||
fn command(&self) -> &str {
|
||||
"$runner build-bundle-binary"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("", inputs![":bundle:artifacts", glob!["qt/bundle/**"]]);
|
||||
build.add_outputs(
|
||||
"",
|
||||
vec![RustOutput::Binary("anki").path(
|
||||
Utf8Path::new("$builddir/bundle/rust"),
|
||||
Some(
|
||||
overriden_rust_target_triple()
|
||||
.unwrap_or_else(|| Platform::current().as_rust_triple()),
|
||||
),
|
||||
// our pyoxidizer bin uses lto on the release profile
|
||||
BuildProfile::Release,
|
||||
)],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_binary(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:binary", BuildBundle {})
|
||||
}
|
||||
|
||||
struct BuildDistFolder {
|
||||
kind: DistKind,
|
||||
deps: BuildInput,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildDistFolder {
|
||||
fn command(&self) -> &str {
|
||||
"$runner build-dist-folder $kind $out_folder "
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
build.add_inputs("", &self.deps);
|
||||
build.add_variable("kind", self.kind.name());
|
||||
let folder = match self.kind {
|
||||
DistKind::Standard => "bundle/std",
|
||||
};
|
||||
build.add_outputs("out_folder", vec![folder]);
|
||||
build.add_outputs("stamp", vec![format!("{folder}.stamp")]);
|
||||
}
|
||||
|
||||
fn check_output_timestamps(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn build_dist_folder(build: &mut Build, kind: DistKind) -> Result<()> {
|
||||
let deps = inputs![":bundle:deps", ":bundle:binary", glob!["qt/bundle/**"]];
|
||||
let group = match kind {
|
||||
DistKind::Standard => "bundle:folder:std",
|
||||
};
|
||||
build.add_action(group, BuildDistFolder { kind, deps })
|
||||
}
|
||||
|
||||
fn build_packages(build: &mut Build) -> Result<()> {
|
||||
if cfg!(windows) {
|
||||
build_windows_installers(build)
|
||||
} else if cfg!(target_os = "macos") {
|
||||
build_mac_app(build, DistKind::Standard)?;
|
||||
build_dmgs(build)
|
||||
} else {
|
||||
build_tarball(build, DistKind::Standard)
|
||||
}
|
||||
}
|
||||
|
||||
struct BuildTarball {
|
||||
kind: DistKind,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildTarball {
|
||||
fn command(&self) -> &str {
|
||||
"chmod -R a+r $folder && tar -I '$zstd' --transform $transform -cf $tarball -C $folder ."
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let input_folder_name = self.kind.folder_name();
|
||||
let input_folder_target = format!(":bundle:folder:{input_folder_name}");
|
||||
let input_folder_path = format!("$builddir/bundle/{input_folder_name}");
|
||||
|
||||
let version = anki_version();
|
||||
let qt = match self.kind {
|
||||
DistKind::Standard => "qt6",
|
||||
};
|
||||
let output_folder_base = format!("anki-{version}-linux-{qt}");
|
||||
let output_tarball = format!("bundle/package/{output_folder_base}.tar.zst");
|
||||
|
||||
build.add_inputs("", inputs![input_folder_target]);
|
||||
build.add_variable("zstd", "zstd -c --long -T0 -18");
|
||||
build.add_variable("transform", format!("s%^.%{output_folder_base}%S"));
|
||||
build.add_variable("folder", input_folder_path);
|
||||
build.add_outputs("tarball", vec![output_tarball]);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_tarball(build: &mut Build, kind: DistKind) -> Result<()> {
|
||||
let name = kind.folder_name();
|
||||
build.add_action(format!("bundle:package:{name}"), BuildTarball { kind })
|
||||
}
|
||||
|
||||
struct BuildWindowsInstallers {}
|
||||
|
||||
impl BuildAction for BuildWindowsInstallers {
|
||||
fn command(&self) -> &str {
|
||||
"cargo run -p makeexe --target-dir=out/rust -- $version $src_root $bundle_root $out"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let version = anki_version();
|
||||
let outputs = ["qt6"].iter().map(|qt| {
|
||||
let output_base = format!("anki-{version}-windows-{qt}");
|
||||
format!("bundle/package/{output_base}.exe")
|
||||
});
|
||||
|
||||
build.add_inputs("", inputs![":bundle:folder:std"]);
|
||||
build.add_variable("version", &version);
|
||||
build.add_variable("bundle_root", "$builddir/bundle");
|
||||
build.add_outputs("out", outputs);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_windows_installers(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:package", BuildWindowsInstallers {})
|
||||
}
|
||||
|
||||
struct BuildMacApp {
|
||||
kind: DistKind,
|
||||
}
|
||||
|
||||
impl BuildAction for BuildMacApp {
|
||||
fn command(&self) -> &str {
|
||||
"cargo run -p makeapp --target-dir=out/rust -- build-app $version $kind $stamp"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let folder_name = self.kind.folder_name();
|
||||
build.add_inputs("", inputs![format!(":bundle:folder:{folder_name}")]);
|
||||
build.add_variable("version", anki_version());
|
||||
build.add_variable("kind", self.kind.name());
|
||||
build.add_outputs("stamp", vec![format!("bundle/app/{folder_name}.stamp")]);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_mac_app(build: &mut Build, kind: DistKind) -> Result<()> {
|
||||
build.add_action(format!("bundle:app:{}", kind.name()), BuildMacApp { kind })
|
||||
}
|
||||
|
||||
struct BuildDmgs {}
|
||||
|
||||
impl BuildAction for BuildDmgs {
|
||||
fn command(&self) -> &str {
|
||||
"cargo run -p makeapp --target-dir=out/rust -- build-dmgs $dmgs"
|
||||
}
|
||||
|
||||
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
|
||||
let version = anki_version();
|
||||
let platform = if targetting_macos_arm() {
|
||||
"apple"
|
||||
} else {
|
||||
"intel"
|
||||
};
|
||||
let qt = &["qt6"][..];
|
||||
let dmgs = qt
|
||||
.iter()
|
||||
.map(|qt| format!("bundle/dmg/anki-{version}-mac-{platform}-{qt}.dmg"));
|
||||
|
||||
build.add_inputs("", inputs![":bundle:app"]);
|
||||
build.add_outputs("dmgs", dmgs);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_dmgs(build: &mut Build) -> Result<()> {
|
||||
build.add_action("bundle:dmg", BuildDmgs {})
|
||||
}
|
build/configure/src/launcher.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use anyhow::Result;
use ninja_gen::archives::download_and_extract;
use ninja_gen::archives::empty_manifest;
use ninja_gen::archives::OnlineArchive;
use ninja_gen::command::RunCommand;
use ninja_gen::hashmap;
use ninja_gen::inputs;
use ninja_gen::Build;

pub fn setup_uv_universal(build: &mut Build) -> Result<()> {
    if !cfg!(target_arch = "aarch64") {
        return Ok(());
    }

    build.add_action(
        "launcher:uv_universal",
        RunCommand {
            command: "/usr/bin/lipo",
            args: "-create -output $out $arm_bin $x86_bin",
            inputs: hashmap! {
                "arm_bin" => inputs![":extract:uv:bin"],
                "x86_bin" => inputs![":extract:uv_mac_x86:bin"],
            },
            outputs: hashmap! {
                "out" => vec!["launcher/uv"],
            },
        },
    )
}

pub fn build_launcher(build: &mut Build) -> Result<()> {
    setup_uv_universal(build)?;
    download_and_extract(build, "nsis_plugins", NSIS_PLUGINS, empty_manifest())?;

    Ok(())
}

const NSIS_PLUGINS: OnlineArchive = OnlineArchive {
    url: "https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2023-05-19/nsis.tar.zst",
    sha256: "6133f730ece699de19714d0479c73bc848647d277e9cc80dda9b9ebe532b40a8",
};
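The launcher:uv_universal action only runs on Apple Silicon hosts, where it fuses the ARM64 and x86_64 uv binaries into one universal executable. Roughly what that RunCommand expands to at build time, sketched with std::process::Command (paths are illustrative):

use std::io;
use std::process::Command;

fn make_universal(arm_bin: &str, x86_bin: &str, out: &str) -> io::Result<()> {
    // Equivalent of: /usr/bin/lipo -create -output <out> <arm_bin> <x86_bin>
    let status = Command::new("/usr/bin/lipo")
        .args(["-create", "-output", out, arm_bin, x86_bin])
        .status()?;
    assert!(status.success(), "lipo failed");
    Ok(())
}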
@@ -2,7 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 mod aqt;
-mod bundle;
+mod launcher;
 mod platform;
 mod pylib;
 mod python;
@@ -13,13 +13,14 @@ use std::env;
 use anyhow::Result;
 use aqt::build_and_check_aqt;
-use bundle::build_bundle;
+use launcher::build_launcher;
 use ninja_gen::glob;
 use ninja_gen::inputs;
 use ninja_gen::protobuf::check_proto;
 use ninja_gen::protobuf::setup_protoc;
-use ninja_gen::python::setup_python;
+use ninja_gen::python::setup_uv;
 use ninja_gen::Build;
+use platform::overriden_python_venv_platform;
 use pylib::build_pylib;
 use pylib::check_pylib;
 use python::check_python;
@@ -47,7 +48,10 @@ fn main() -> Result<()> {
     check_proto(build, inputs![glob!["proto/**/*.proto"]])?;

     if env::var("OFFLINE_BUILD").is_err() {
-        setup_python(build)?;
+        setup_uv(
+            build,
+            overriden_python_venv_platform().unwrap_or(build.host_platform),
+        )?;
     }
     setup_venv(build)?;

@@ -57,7 +61,7 @@ fn main() -> Result<()> {
     build_and_check_aqt(build)?;

     if env::var("OFFLINE_BUILD").is_err() {
-        build_bundle(build)?;
+        build_launcher(build)?;
     }

     setup_sphinx(build)?;
@@ -5,18 +5,30 @@ use std::env;
 use ninja_gen::archives::Platform;

-/// Usually None to use the host architecture; can be overriden by setting
-/// MAC_X86 to build for x86_64 on Apple Silicon
+/// Please see [`overriden_python_target_platform()`] for details.
 pub fn overriden_rust_target_triple() -> Option<&'static str> {
-    overriden_python_target_platform().map(|p| p.as_rust_triple())
+    overriden_python_wheel_platform().map(|p| p.as_rust_triple())
 }

-/// Usually None to use the host architecture; can be overriden by setting
-/// MAC_X86 to build for x86_64 on Apple Silicon
-pub fn overriden_python_target_platform() -> Option<Platform> {
-    if env::var("MAC_X86").is_ok() {
-        Some(Platform::MacX64)
+/// Usually None to use the host architecture, except on Windows which
+/// always uses x86_64, since WebEngine is unavailable for ARM64.
+pub fn overriden_python_venv_platform() -> Option<Platform> {
+    if cfg!(target_os = "windows") {
+        Some(Platform::WindowsX64)
     } else {
         None
     }
 }
+
+/// Like [`overriden_python_venv_platform`], but:
+/// If MAC_X86 is set, an X86 wheel will be built on macOS ARM.
+/// If LIN_ARM64 is set, an ARM64 wheel will be built on Linux AMD64.
+pub fn overriden_python_wheel_platform() -> Option<Platform> {
+    if env::var("MAC_X86").is_ok() {
+        Some(Platform::MacX64)
+    } else if env::var("LIN_ARM64").is_ok() {
+        Some(Platform::LinuxArm)
+    } else {
+        overriden_python_venv_platform()
+    }
+}
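The two overrides compose: the wheel platform defers to the venv platform unless MAC_X86 or LIN_ARM64 is set. A sketch of the expected precedence as a test (hypothetical, and fragile if run in parallel with other env-mutating tests):

#[test]
fn wheel_platform_precedence() {
    // MAC_X86 takes priority over the venv fallback.
    std::env::set_var("MAC_X86", "1");
    assert_eq!(overriden_python_wheel_platform(), Some(Platform::MacX64));
    std::env::remove_var("MAC_X86");

    // With no overrides set, the venv platform decides:
    // Some(WindowsX64) on Windows, None elsewhere.
    assert_eq!(
        overriden_python_wheel_platform(),
        overriden_python_venv_platform()
    );
}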
@@ -14,7 +14,7 @@ use ninja_gen::python::PythonTest;
 use ninja_gen::Build;

 use crate::anki_version;
-use crate::platform::overriden_python_target_platform;
+use crate::platform::overriden_python_wheel_platform;
 use crate::python::BuildWheel;
 use crate::python::GenPythonProto;

@@ -50,7 +50,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
         output: &format!(
             "pylib/anki/_rsbridge.{}",
             match build.host_platform {
-                Platform::WindowsX64 => "pyd",
+                Platform::WindowsX64 | Platform::WindowsArm => "pyd",
                 _ => "so",
             }
         ),
@@ -64,13 +64,12 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
         BuildWheel {
             name: "anki",
             version: anki_version(),
-            src_folder: "pylib/anki",
-            gen_folder: "$builddir/pylib/anki",
-            platform: overriden_python_target_platform().or(Some(build.host_platform)),
+            platform: overriden_python_wheel_platform().or(Some(build.host_platform)),
             deps: inputs![
                 ":pylib:anki",
                 glob!("pylib/anki/**"),
-                "python/requirements.anki.in",
+                "pylib/pyproject.toml",
+                "pylib/hatch_build.py"
             ],
         },
     )?;
@@ -1,93 +1,73 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::env;

 use anyhow::Result;
 use ninja_gen::action::BuildAction;
 use ninja_gen::archives::Platform;
 use ninja_gen::build::FilesHandle;
 use ninja_gen::command::RunCommand;
 use ninja_gen::copy::CopyFiles;
 use ninja_gen::glob;
 use ninja_gen::hashmap;
 use ninja_gen::input::BuildInput;
 use ninja_gen::inputs;
 use ninja_gen::python::python_format;
 use ninja_gen::python::PythonEnvironment;
-use ninja_gen::python::PythonLint;
 use ninja_gen::python::PythonTypecheck;
-use ninja_gen::rsync::RsyncFiles;
+use ninja_gen::python::RuffCheck;
 use ninja_gen::Build;

-// When updating Qt, make sure to update the .txt file in bundle.rs as well.
+/// Normalize version string by removing leading zeros from numeric parts
+/// while preserving pre-release markers (b1, rc2, a3, etc.)
+fn normalize_version(version: &str) -> String {
+    version
+        .split('.')
+        .map(|part| {
+            // Check if the part contains only digits
+            if part.chars().all(|c| c.is_ascii_digit()) {
+                // Numeric part: remove leading zeros
+                part.parse::<u32>().unwrap_or(0).to_string()
+            } else {
+                // Mixed part (contains both numbers and pre-release markers)
+                // Split on first non-digit character and normalize the numeric prefix
+                let chars = part.chars();
+                let mut numeric_prefix = String::new();
+                let mut rest = String::new();
+                let mut found_non_digit = false;
+
+                for ch in chars {
+                    if ch.is_ascii_digit() && !found_non_digit {
+                        numeric_prefix.push(ch);
+                    } else {
+                        found_non_digit = true;
+                        rest.push(ch);
+                    }
+                }
+
+                if numeric_prefix.is_empty() {
+                    part.to_string()
+                } else {
+                    let normalized_prefix = numeric_prefix.parse::<u32>().unwrap_or(0).to_string();
+                    format!("{normalized_prefix}{rest}")
+                }
+            }
+        })
+        .collect::<Vec<_>>()
+        .join(".")
+}

 pub fn setup_venv(build: &mut Build) -> Result<()> {
-    let platform_deps = if cfg!(windows) {
-        inputs![
-            "python/requirements.qt6_6.txt",
-            "python/requirements.win.txt",
-        ]
-    } else if cfg!(target_os = "macos") {
-        inputs!["python/requirements.qt6_6.txt",]
-    } else if std::env::var("PYTHONPATH").is_ok() {
-        // assume we have a system-provided Qt
-        inputs![]
-    } else if cfg!(target_arch = "aarch64") {
-        inputs!["python/requirements.qt6_8.txt"]
-    } else {
-        inputs!["python/requirements.qt6_6.txt"]
-    };
-    let requirements_txt = inputs!["python/requirements.dev.txt", platform_deps];
+    let extra_binary_exports = &["mypy", "ruff", "pytest", "protoc-gen-mypy"];
     build.add_action(
         "pyenv",
         PythonEnvironment {
-            folder: "pyenv",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt,
-            extra_binary_exports: &[
-                "pip-compile",
-                "pip-sync",
-                "mypy",
-                "black", // Required for offline build
-                "isort",
-                "pylint",
-                "pytest",
-                "protoc-gen-mypy", // ditto
-            ],
+            venv_folder: "pyenv",
+            deps: inputs![
+                "pyproject.toml",
+                "pylib/pyproject.toml",
+                "qt/pyproject.toml",
+                "uv.lock"
+            ],
+            extra_args: "--all-packages --extra qt --extra audio",
+            extra_binary_exports,
         },
     )?;
-
-    // optional venvs for testing other Qt versions
-    let mut venv_reqs = inputs!["python/requirements.bundle.txt"];
-    if cfg!(windows) {
-        venv_reqs = inputs![venv_reqs, "python/requirements.win.txt"];
-    }
-
-    build.add_action(
-        "pyenv-qt6.8",
-        PythonEnvironment {
-            folder: "pyenv-qt6.8",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![&venv_reqs, "python/requirements.qt6_8.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
-    build.add_action(
-        "pyenv-qt5.15",
-        PythonEnvironment {
-            folder: "pyenv-qt5.15",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![&venv_reqs, "python/requirements.qt5_15.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
-    build.add_action(
-        "pyenv-qt5.14",
-        PythonEnvironment {
-            folder: "pyenv-qt5.14",
-            base_requirements_txt: inputs!["python/requirements.base.txt"],
-            requirements_txt: inputs![venv_reqs, "python/requirements.qt5_14.txt"],
-            extra_binary_exports: &[],
-        },
-    )?;
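The point of normalize_version is that uv writes wheels with PEP 440-normalized version strings, and the build graph must predict that filename exactly; a version like 25.06b1 would otherwise produce an output path that never appears on disk. For example, given the function above:

fn demo() {
    // Leading zeros are stripped from the numeric prefix of each part...
    assert_eq!(normalize_version("25.06b1"), "25.6b1");
    // ...while already-normalized parts pass through unchanged.
    assert_eq!(normalize_version("2.1.66"), "2.1.66");
}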
@@ -133,45 +113,66 @@ impl BuildAction for GenPythonProto {
 pub struct BuildWheel {
     pub name: &'static str,
     pub version: String,
-    pub src_folder: &'static str,
-    pub gen_folder: &'static str,
     pub platform: Option<Platform>,
     pub deps: BuildInput,
 }

 impl BuildAction for BuildWheel {
     fn command(&self) -> &str {
-        "$pyenv_bin $script $src $gen $out"
+        "$uv build --wheel --out-dir=$out_dir --project=$project_dir"
     }

     fn files(&mut self, build: &mut impl FilesHandle) {
-        build.add_inputs("pyenv_bin", inputs![":pyenv:bin"]);
-        build.add_inputs("script", inputs!["python/write_wheel.py"]);
-        build.add_inputs("", &self.deps);
-        build.add_variable("src", self.src_folder);
-        build.add_variable("gen", self.gen_folder);
+        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
+            let uv_path =
+                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
+            build.add_inputs("uv", inputs![uv_path]);
+        } else {
+            build.add_inputs("uv", inputs![":uv_binary"]);
+        }
+
+        build.add_inputs("", &self.deps);
+
+        // Set the project directory based on which package we're building
+        let project_dir = if self.name == "anki" { "pylib" } else { "qt" };
+        build.add_variable("project_dir", project_dir);
+
+        // Set environment variable for uv to use our pyenv
+        build.add_variable("pyenv_path", "$builddir/pyenv");
+        build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
+
+        // Set output directory
+        build.add_variable("out_dir", "$builddir/wheels/");

+        // Calculate the wheel filename that uv will generate
         let tag = if let Some(platform) = self.platform {
-            let platform = match platform {
-                Platform::LinuxX64 => "manylinux_2_35_x86_64",
-                Platform::LinuxArm => "manylinux_2_35_aarch64",
+            let platform_tag = match platform {
+                Platform::LinuxX64 => "manylinux_2_36_x86_64",
+                Platform::LinuxArm => "manylinux_2_36_aarch64",
                 Platform::MacX64 => "macosx_12_0_x86_64",
                 Platform::MacArm => "macosx_12_0_arm64",
                 Platform::WindowsX64 => "win_amd64",
+                Platform::WindowsArm => "win_arm64",
             };
-            format!("cp39-abi3-{platform}")
+            format!("cp39-abi3-{platform_tag}")
         } else {
             "py3-none-any".into()
         };

+        // Set environment variable for hatch_build.py to use the correct platform tag
+        build.add_variable("wheel_tag", &tag);
+        build.add_env_var("ANKI_WHEEL_TAG", "$wheel_tag");
+
         let name = self.name;
-        let version = &self.version;
-        let wheel_path = format!("wheels/{name}-{version}-{tag}.whl");
+        let normalized_version = normalize_version(&self.version);
+
+        let wheel_path = format!("wheels/{name}-{normalized_version}-{tag}.whl");
         build.add_outputs("out", vec![wheel_path]);
     }
 }

 pub fn check_python(build: &mut Build) -> Result<()> {
     python_format(build, "ftl", inputs![glob!("ftl/**/*.py")])?;
     python_format(build, "tools", inputs![glob!("tools/**/*.py")])?;

     build.add_action(
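The wheel path registered as the action's output is thus computed entirely up front from the name, the normalized version, and the platform tag. A compact restatement of that assembly, under the tag values shown above (the example version is illustrative):

fn wheel_filename(name: &str, normalized_version: &str, platform_tag: Option<&str>) -> String {
    let tag = match platform_tag {
        // Binary wheels target the stable ABI from CPython 3.9 up.
        Some(p) => format!("cp39-abi3-{p}"),
        // Pure-Python wheels.
        None => "py3-none-any".to_string(),
    };
    format!("wheels/{name}-{normalized_version}-{tag}.whl")
}

// e.g. wheel_filename("anki", "25.6b1", Some("manylinux_2_36_x86_64"))
// == "wheels/anki-25.6b1-cp39-abi3-manylinux_2_36_x86_64.whl"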
@@ -183,7 +184,6 @@ pub fn check_python(build: &mut Build) -> Result<()> {
             "qt/tools",
             "out/pylib/anki",
             "out/qt/_aqt",
             "ftl",
             "python",
             "tools",
         ],
@@ -195,60 +195,26 @@ pub fn check_python(build: &mut Build) -> Result<()> {
         },
     )?;

     add_pylint(build)?;

     Ok(())
 }

 fn add_pylint(build: &mut Build) -> Result<()> {
-    // pylint does not support PEP420 implicit namespaces split across import paths,
-    // so we need to merge our pylib sources and generated files before invoking it,
-    // and add a top-level __init__.py
+    let ruff_folders = &["qt/aqt", "ftl", "pylib/tools", "tools", "python"];
+    let ruff_deps = inputs![
+        glob!["{pylib,ftl,qt,python,tools}/**/*.py"],
+        ":pylib:anki",
+        ":qt:aqt"
+    ];
     build.add_action(
-        "check:pylint:copy_pylib",
-        RsyncFiles {
-            inputs: inputs![":pylib:anki"],
-            target_folder: "pylint/anki",
-            strip_prefix: "$builddir/pylib/anki",
-            // avoid copying our large rsbridge binary
-            extra_args: "--links",
+        "check:ruff",
+        RuffCheck {
+            folders: ruff_folders,
+            deps: ruff_deps.clone(),
+            check_only: true,
         },
     )?;
     build.add_action(
-        "check:pylint:copy_pylib",
-        RsyncFiles {
-            inputs: inputs![glob!["pylib/anki/**"]],
-            target_folder: "pylint/anki",
-            strip_prefix: "pylib/anki",
-            extra_args: "",
-        },
-    )?;
-    build.add_action(
-        "check:pylint:copy_pylib",
-        RunCommand {
-            command: ":pyenv:bin",
-            args: "$script $out",
-            inputs: hashmap! { "script" => inputs!["python/mkempty.py"] },
-            outputs: hashmap! { "out" => vec!["pylint/anki/__init__.py"] },
-        },
-    )?;
-    build.add_action(
-        "check:pylint",
-        PythonLint {
-            folders: &[
-                "$builddir/pylint/anki",
-                "qt/aqt",
-                "ftl",
-                "pylib/tools",
-                "tools",
-                "python",
-            ],
-            pylint_ini: inputs![".pylintrc"],
-            deps: inputs![
-                ":check:pylint:copy_pylib",
-                ":qt:aqt",
-                glob!("{pylib/tools,ftl,qt,python,tools}/**/*.py")
-            ],
+        "fix:ruff",
+        RuffCheck {
+            folders: ruff_folders,
+            deps: ruff_deps,
+            check_only: false,
         },
     )?;
@@ -261,17 +227,23 @@ struct Sphinx {

 impl BuildAction for Sphinx {
     fn command(&self) -> &str {
-        if env::var("OFFLINE_BUILD").is_err() {
-            "$pip install sphinx sphinx_rtd_theme sphinx-autoapi \
-            && $python python/sphinx/build.py"
-        } else {
+        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
             "$python python/sphinx/build.py"
+        } else {
+            "$uv sync --extra sphinx && $python python/sphinx/build.py"
         }
     }

     fn files(&mut self, build: &mut impl FilesHandle) {
-        if env::var("OFFLINE_BUILD").is_err() {
-            build.add_inputs("pip", inputs![":pyenv:pip"]);
+        if std::env::var("OFFLINE_BUILD").ok().as_deref() == Some("1") {
+            let uv_path =
+                std::env::var("UV_BINARY").expect("UV_BINARY must be set in OFFLINE_BUILD mode");
+            build.add_inputs("uv", inputs![uv_path]);
+        } else {
+            build.add_inputs("uv", inputs![":uv_binary"]);
+            // Set environment variable to use the existing pyenv
+            build.add_variable("pyenv_path", "$builddir/pyenv");
+            build.add_env_var("UV_PROJECT_ENVIRONMENT", "$pyenv_path");
         }
         build.add_inputs("python", inputs![":pyenv:bin"]);
         build.add_inputs("", &self.deps);
@@ -294,8 +266,35 @@ pub(crate) fn setup_sphinx(build: &mut Build) -> Result<()> {
     build.add_action(
         "python:sphinx",
         Sphinx {
-            deps: inputs![":pylib", ":qt", ":python:sphinx:copy_conf"],
+            deps: inputs![
+                ":pylib",
+                ":qt",
+                ":python:sphinx:copy_conf",
+                "pyproject.toml"
+            ],
         },
     )?;
     Ok(())
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_normalize_version_basic() {
+        assert_eq!(normalize_version("1.2.3"), "1.2.3");
+        assert_eq!(normalize_version("01.02.03"), "1.2.3");
+        assert_eq!(normalize_version("1.0.0"), "1.0.0");
+    }
+
+    #[test]
+    fn test_normalize_version_with_prerelease() {
+        assert_eq!(normalize_version("1.2.3b1"), "1.2.3b1");
+        assert_eq!(normalize_version("01.02.03b1"), "1.2.3b1");
+        assert_eq!(normalize_version("1.0.0rc2"), "1.0.0rc2");
+        assert_eq!(normalize_version("2.1.0a3"), "2.1.0a3");
+        assert_eq!(normalize_version("1.2.3beta1"), "1.2.3beta1");
+        assert_eq!(normalize_version("1.2.3alpha1"), "1.2.3alpha1");
+    }
+}
@@ -154,7 +154,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
         "$builddir/buildhash",
         // building on Windows requires python3.lib
         if cfg!(windows) {
-            inputs![":extract:python"]
+            inputs![":pyenv:bin"]
         } else {
             inputs![]
         }
@@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {

 pub fn check_rust(build: &mut Build) -> Result<()> {
     let inputs = inputs![
-        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,tools/workspace-hack/**}"),
+        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"),
         "Cargo.lock",
         "Cargo.toml",
         "rust-toolchain.toml",
@@ -246,8 +246,8 @@ pub fn check_minilints(build: &mut Build) -> Result<()> {

     let files = inputs![
         glob![
-            "**/*.{py,rs,ts,svelte,mjs}",
-            "{node_modules,qt/bundle/PyOxidizer,ts/.svelte-kit}/**"
+            "**/*.{py,rs,ts,svelte,mjs,md}",
+            "{node_modules,ts/.svelte-kit}/**"
         ],
         "Cargo.lock"
     ];
@@ -16,5 +16,26 @@ globset.workspace = true
 itertools.workspace = true
 maplit.workspace = true
 num_cpus.workspace = true
+regex.workspace = true
+serde_json.workspace = true
+sha2.workspace = true
 walkdir.workspace = true
 which.workspace = true
+
+[target.'cfg(windows)'.dependencies]
+reqwest = { workspace = true, features = ["blocking", "json", "native-tls"] }
+
+[target.'cfg(not(windows))'.dependencies]
+reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] }
+
+[[bin]]
+name = "update_uv"
+path = "src/bin/update_uv.rs"
+
+[[bin]]
+name = "update_protoc"
+path = "src/bin/update_protoc.rs"
+
+[[bin]]
+name = "update_node"
+path = "src/bin/update_node.rs"
@@ -49,6 +49,46 @@ pub trait BuildAction {
     }

     fn name(&self) -> &'static str {
-        std::any::type_name::<Self>().split("::").last().unwrap()
+        std::any::type_name::<Self>()
+            .split("::")
+            .last()
+            .unwrap()
+            .split('<')
+            .next()
+            .unwrap()
     }
 }
+
+#[cfg(test)]
+trait TestBuildAction {}
+
+#[cfg(test)]
+impl<T: TestBuildAction + ?Sized> BuildAction for T {
+    fn command(&self) -> &str {
+        "test"
+    }
+    fn files(&mut self, _build: &mut impl FilesHandle) {}
+}
+
+#[allow(dead_code, unused_variables)]
+#[test]
+fn should_strip_regions_in_type_name() {
+    struct Bare;
+    impl TestBuildAction for Bare {}
+    assert_eq!(Bare {}.name(), "Bare");
+
+    struct WithLifeTime<'a>(&'a str);
+    impl TestBuildAction for WithLifeTime<'_> {}
+    assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
+
+    struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
+    impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
+    assert_eq!(
+        WithMultiLifeTime("test", "test").name(),
+        "WithMultiLifeTime"
+    );
+
+    struct WithGeneric<T>(T);
+    impl<T> TestBuildAction for WithGeneric<T> {}
+    assert_eq!(WithGeneric(3).name(), "WithGeneric");
+}
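The extra split('<') is needed because std::any::type_name includes generic and lifetime parameters in its output. Its exact format is not guaranteed by the standard library, but it typically looks like the following, which is why taking the text before the first '<' recovers the bare struct name:

fn demo() {
    let full = std::any::type_name::<Vec<u8>>();
    // Typically "alloc::vec::Vec<u8>": the last "::" segment is "Vec<u8>",
    // and splitting that on '<' leaves just "Vec".
    println!("{full}");
}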
@@ -26,22 +26,21 @@ pub enum Platform {
     MacX64,
     MacArm,
     WindowsX64,
+    WindowsArm,
 }

 impl Platform {
     pub fn current() -> Self {
-        if cfg!(windows) {
-            Self::WindowsX64
-        } else {
-            let os = std::env::consts::OS;
-            let arch = std::env::consts::ARCH;
-            match (os, arch) {
-                ("linux", "x86_64") => Self::LinuxX64,
-                ("linux", "aarch64") => Self::LinuxArm,
-                ("macos", "x86_64") => Self::MacX64,
-                ("macos", "aarch64") => Self::MacArm,
-                _ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
-            }
+        let os = std::env::consts::OS;
+        let arch = std::env::consts::ARCH;
+        match (os, arch) {
+            ("linux", "x86_64") => Self::LinuxX64,
+            ("linux", "aarch64") => Self::LinuxArm,
+            ("macos", "x86_64") => Self::MacX64,
+            ("macos", "aarch64") => Self::MacArm,
+            ("windows", "x86_64") => Self::WindowsX64,
+            ("windows", "aarch64") => Self::WindowsArm,
+            _ => panic!("unsupported os/arch {os} {arch} - PR welcome!"),
         }
     }

@@ -62,12 +61,13 @@ impl Platform {
             Platform::MacX64 => "x86_64-apple-darwin",
             Platform::MacArm => "aarch64-apple-darwin",
             Platform::WindowsX64 => "x86_64-pc-windows-msvc",
+            Platform::WindowsArm => "aarch64-pc-windows-msvc",
         }
     }
 }

 /// Append .exe to path if on Windows.
-pub fn with_exe(path: &str) -> Cow<str> {
+pub fn with_exe(path: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{path}.exe").into()
     } else {
build/ninja_gen/src/bin/update_node.rs (new file, 268 lines)
@@ -0,0 +1,268 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::error::Error;
use std::fs;
use std::path::Path;

use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;

#[derive(Debug)]
struct NodeRelease {
    version: String,
    files: Vec<NodeFile>,
}

#[derive(Debug)]
struct NodeFile {
    filename: String,
    url: String,
}

fn main() -> Result<(), Box<dyn Error>> {
    let release_info = fetch_node_release_info()?;
    let new_text = generate_node_archive_function(&release_info)?;
    update_node_text(&new_text)?;
    println!("Node.js archive function updated successfully!");
    Ok(())
}

fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
    let client = Client::new();

    // Get the Node.js release info
    let response = client
        .get("https://nodejs.org/dist/index.json")
        .header("User-Agent", "anki-build-updater")
        .send()?;

    let releases: Vec<Value> = response.json()?;

    // Find the latest LTS release
    let latest = releases
        .iter()
        .find(|release| {
            // LTS releases have a non-false "lts" field
            release["lts"].as_str().is_some() && release["lts"] != false
        })
        .ok_or("No LTS releases found")?;

    let version = latest["version"]
        .as_str()
        .ok_or("Version not found")?
        .to_string();

    let files = latest["files"]
        .as_array()
        .ok_or("Files array not found")?
        .iter()
        .map(|f| f.as_str().unwrap_or(""))
        .collect::<Vec<_>>();

    let lts_name = latest["lts"].as_str().unwrap_or("unknown");
    println!("Found Node.js LTS version: {version} ({lts_name})");

    // Map platforms to their expected file keys and full filenames
    let platform_mapping = vec![
        (
            "linux-x64",
            "linux-x64",
            format!("node-{version}-linux-x64.tar.xz"),
        ),
        (
            "linux-arm64",
            "linux-arm64",
            format!("node-{version}-linux-arm64.tar.xz"),
        ),
        (
            "darwin-x64",
            "osx-x64-tar",
            format!("node-{version}-darwin-x64.tar.xz"),
        ),
        (
            "darwin-arm64",
            "osx-arm64-tar",
            format!("node-{version}-darwin-arm64.tar.xz"),
        ),
        (
            "win-x64",
            "win-x64-zip",
            format!("node-{version}-win-x64.zip"),
        ),
        (
            "win-arm64",
            "win-arm64-zip",
            format!("node-{version}-win-arm64.zip"),
        ),
    ];

    let mut node_files = Vec::new();

    for (platform, file_key, filename) in platform_mapping {
        // Check if this file exists in the release
        if files.contains(&file_key) {
            let url = format!("https://nodejs.org/dist/{version}/{filename}");
            node_files.push(NodeFile {
                filename: filename.clone(),
                url,
            });
            println!("Found file for {platform}: {filename} (key: {file_key})");
        } else {
            return Err(
                format!("File not found for {platform} (key: {file_key}): {filename}").into(),
            );
        }
    }

    Ok(NodeRelease {
        version,
        files: node_files,
    })
}

fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
    let client = Client::new();

    // Fetch the SHASUMS256.txt file once
    println!("Fetching SHA256 checksums...");
    let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
    let shasums_response = client
        .get(&shasums_url)
        .header("User-Agent", "anki-build-updater")
        .send()?;
    let shasums_text = shasums_response.text()?;

    // Create a mapping from filename patterns to platform names - using the exact
    // patterns we stored in files
    let platform_mapping = vec![
        ("linux-x64.tar.xz", "LinuxX64"),
        ("linux-arm64.tar.xz", "LinuxArm"),
        ("darwin-x64.tar.xz", "MacX64"),
        ("darwin-arm64.tar.xz", "MacArm"),
        ("win-x64.zip", "WindowsX64"),
        ("win-arm64.zip", "WindowsArm"),
    ];

    let mut platform_blocks = Vec::new();

    for (file_pattern, platform_name) in platform_mapping {
        // Find the file that ends with this pattern
        if let Some(file) = release
            .files
            .iter()
            .find(|f| f.filename.ends_with(file_pattern))
        {
            // Find the SHA256 for this file
            let sha256 = shasums_text
                .lines()
                .find(|line| line.contains(&file.filename))
                .and_then(|line| line.split_whitespace().next())
                .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;

            println!(
                "Found SHA256 for {}: {} => {}",
                platform_name, file.filename, sha256
            );

            let block = format!(
                "        Platform::{} => OnlineArchive {{\n            url: \"{}\",\n            sha256: \"{}\",\n        }},",
                platform_name, file.url, sha256
            );
            platform_blocks.push(block);
        } else {
            return Err(format!(
                "File not found for platform {platform_name}: no file ending with {file_pattern}"
            )
            .into());
        }
    }

    let function = format!(
        "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
        platform_blocks.join("\n")
    );

    Ok(function)
}

fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
    let node_rs_content = read_node_rs()?;

    // Regex to match the entire node_archive function with proper multiline
    // matching
    let re = Regex::new(
        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}",
    )?;

    let updated_content = re.replace(&node_rs_content, new_function);

    write_node_rs(&updated_content)?;
    Ok(())
}

fn read_node_rs() -> Result<String, Box<dyn Error>> {
    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
    let manifest_dir =
        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let path = Path::new(&manifest_dir).join("src").join("node.rs");
    Ok(fs::read_to_string(path)?)
}

fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
    // Use CARGO_MANIFEST_DIR to get the crate root, then find src/node.rs
    let manifest_dir =
        std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let path = Path::new(&manifest_dir).join("src").join("node.rs");
    fs::write(path, content)?;
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_regex_replacement() {
        let sample_content = r#"Some other code
pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
            sha256: "old_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
            sha256: "old_hash",
        },
    }
}

More code here"#;

        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
            sha256: "new_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
            sha256: "new_hash",
        },
    }
}"#;

        let re = Regex::new(
            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
        ).unwrap();

        let result = re.replace(sample_content, new_function);
        assert!(result.contains("v21.0.0"));
        assert!(result.contains("new_hash"));
        assert!(!result.contains("old_hash"));
        assert!(result.contains("Some other code"));
        assert!(result.contains("More code here"));
    }
}
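The (?s) flag in the replacement regex is what lets a single pattern span the whole multi-line function body: it makes . match newlines, while the lazy .*? stops at the first closing-brace sequence. A minimal demonstration with the regex crate:

use regex::Regex;

fn demo() {
    let re = Regex::new(r"(?s)start \{.*?\n\}").unwrap();
    let text = "start {\n  a\n  b\n}\ntrailing";
    // Without (?s), `.` would refuse to cross the embedded newlines
    // and this would not match.
    assert!(re.is_match(text));
}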
build/ninja_gen/src/bin/update_protoc.rs (new file, 125 lines)
@@ -0,0 +1,125 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::error::Error;
use std::fs;
use std::path::Path;

use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;
use sha2::Digest;
use sha2::Sha256;

fn fetch_protoc_release_info() -> Result<String, Box<dyn Error>> {
    let client = Client::new();

    println!("Fetching latest protoc release info from GitHub...");
    // Fetch latest release info
    let response = client
        .get("https://api.github.com/repos/protocolbuffers/protobuf/releases/latest")
        .header("User-Agent", "Anki-Build-Script")
        .send()?;

    let release_info: Value = response.json()?;
    let assets = release_info["assets"]
        .as_array()
        .expect("assets should be an array");

    // Map platform names to their corresponding asset patterns
    let platform_patterns = [
        ("LinuxX64", "linux-x86_64"),
        ("LinuxArm", "linux-aarch_64"),
        ("MacX64", "osx-universal_binary"), // Mac uses universal binary for both
        ("MacArm", "osx-universal_binary"),
        ("WindowsX64", "win64"), // Windows uses x86 binary for both archs
        ("WindowsArm", "win64"),
    ];

    let mut match_blocks = Vec::new();

    for (platform, pattern) in platform_patterns {
        // Find the asset matching the platform pattern
        let asset = assets.iter().find(|asset| {
            let name = asset["name"].as_str().unwrap_or("");
            name.starts_with("protoc-") && name.contains(pattern) && name.ends_with(".zip")
        });

        if asset.is_none() {
            eprintln!("No asset found for platform {platform} pattern {pattern}");
            continue;
        }

        let asset = asset.unwrap();
        let download_url = asset["browser_download_url"].as_str().unwrap();
        let asset_name = asset["name"].as_str().unwrap();

        // Download the file and calculate SHA256 locally
        println!("Downloading and checksumming {asset_name} for {platform}...");
        let response = client
            .get(download_url)
            .header("User-Agent", "Anki-Build-Script")
            .send()?;

        let bytes = response.bytes()?;
        let mut hasher = Sha256::new();
        hasher.update(&bytes);
        let sha256 = format!("{:x}", hasher.finalize());

        // Handle platform-specific match patterns
        let match_pattern = match platform {
            "MacX64" => "Platform::MacX64 | Platform::MacArm",
            "MacArm" => continue, // Skip MacArm since it's handled with MacX64
            "WindowsX64" => "Platform::WindowsX64 | Platform::WindowsArm",
            "WindowsArm" => continue, // Skip WindowsArm since it's handled with WindowsX64
            _ => &format!("Platform::{platform}"),
        };

        match_blocks.push(format!(
            "        {match_pattern} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
        ));
    }

    Ok(format!(
        "pub fn protoc_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
        match_blocks.join(",\n")
    ))
}

fn read_protobuf_rs() -> Result<String, Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
    println!("Reading {}", path.display());
    let content = fs::read_to_string(path)?;
    Ok(content)
}

fn update_protoc_text(old_text: &str, new_protoc_text: &str) -> Result<String, Box<dyn Error>> {
    let re =
        Regex::new(r"(?ms)^pub fn protoc_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\}")
            .unwrap();
    if !re.is_match(old_text) {
        return Err("Could not find protoc_archive function block to replace".into());
    }
    let new_content = re.replace(old_text, new_protoc_text).to_string();
    println!("Original lines: {}", old_text.lines().count());
    println!("Updated lines: {}", new_content.lines().count());
    Ok(new_content)
}

fn write_protobuf_rs(content: &str) -> Result<(), Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/protobuf.rs");
    println!("Writing to {}", path.display());
    fs::write(path, content)?;
    Ok(())
}

fn main() -> Result<(), Box<dyn Error>> {
    let new_protoc_archive = fetch_protoc_release_info()?;
    let content = read_protobuf_rs()?;
    let updated_content = update_protoc_text(&content, &new_protoc_archive)?;
    write_protobuf_rs(&updated_content)?;
    println!("Successfully updated protoc_archive function in protobuf.rs");
    Ok(())
}
build/ninja_gen/src/bin/update_uv.rs (new file, 140 lines)
@@ -0,0 +1,140 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::error::Error;
use std::fs;
use std::path::Path;

use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;

fn fetch_uv_release_info() -> Result<String, Box<dyn Error>> {
    let client = Client::new();

    println!("Fetching latest uv release info from GitHub...");
    // Fetch latest release info
    let response = client
        .get("https://api.github.com/repos/astral-sh/uv/releases/latest")
        .header("User-Agent", "Anki-Build-Script")
        .send()?;

    let release_info: Value = response.json()?;
    let assets = release_info["assets"]
        .as_array()
        .expect("assets should be an array");

    // Map platform names to their corresponding asset patterns
    let platform_patterns = [
        ("LinuxX64", "x86_64-unknown-linux-gnu"),
        ("LinuxArm", "aarch64-unknown-linux-gnu"),
        ("MacX64", "x86_64-apple-darwin"),
        ("MacArm", "aarch64-apple-darwin"),
        ("WindowsX64", "x86_64-pc-windows-msvc"),
        ("WindowsArm", "aarch64-pc-windows-msvc"),
    ];

    let mut match_blocks = Vec::new();

    for (platform, pattern) in platform_patterns {
        // Find the asset matching the platform pattern (the binary)
        let asset = assets.iter().find(|asset| {
            let name = asset["name"].as_str().unwrap_or("");
            name.contains(pattern) && (name.ends_with(".tar.gz") || name.ends_with(".zip"))
        });
        if asset.is_none() {
            eprintln!("No asset found for platform {platform} pattern {pattern}");
            continue;
        }
        let asset = asset.unwrap();
        let download_url = asset["browser_download_url"].as_str().unwrap();
        let asset_name = asset["name"].as_str().unwrap();

        // Find the corresponding .sha256 or .sha256sum asset
        let sha_asset = assets.iter().find(|a| {
            let name = a["name"].as_str().unwrap_or("");
            name == format!("{asset_name}.sha256") || name == format!("{asset_name}.sha256sum")
        });
        if sha_asset.is_none() {
            eprintln!("No sha256 asset found for {asset_name}");
            continue;
        }
        let sha_asset = sha_asset.unwrap();
        let sha_url = sha_asset["browser_download_url"].as_str().unwrap();
        println!("Fetching SHA256 for {platform}...");
        let sha_text = client
            .get(sha_url)
            .header("User-Agent", "Anki-Build-Script")
            .send()?
            .text()?;
        // The sha file is usually of the form: "<sha256> <filename>"
        let sha256 = sha_text.split_whitespace().next().unwrap_or("");

        match_blocks.push(format!(
            "        Platform::{platform} => {{\n            OnlineArchive {{\n                url: \"{download_url}\",\n                sha256: \"{sha256}\",\n            }}\n        }}"
        ));
    }

    Ok(format!(
        "pub fn uv_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}",
        match_blocks.join(",\n")
    ))
}

fn read_python_rs() -> Result<String, Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/python.rs");
    println!("Reading {}", path.display());
    let content = fs::read_to_string(path)?;
    Ok(content)
}

fn update_uv_text(old_text: &str, new_uv_text: &str) -> Result<String, Box<dyn Error>> {
    let re = Regex::new(r"(?ms)^pub fn uv_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}\s*\n\s*\}").unwrap();
    if !re.is_match(old_text) {
        return Err("Could not find uv_archive function block to replace".into());
    }
    let new_content = re.replace(old_text, new_uv_text).to_string();
    println!("Original lines: {}", old_text.lines().count());
    println!("Updated lines: {}", new_content.lines().count());
    Ok(new_content)
}

fn write_python_rs(content: &str) -> Result<(), Box<dyn Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
    let path = Path::new(&manifest_dir).join("src/python.rs");
    println!("Writing to {}", path.display());
    fs::write(path, content)?;
    Ok(())
}

fn main() -> Result<(), Box<dyn Error>> {
    let new_uv_archive = fetch_uv_release_info()?;
    let content = read_python_rs()?;
    let updated_content = update_uv_text(&content, &new_uv_archive)?;
    write_python_rs(&updated_content)?;
    println!("Successfully updated uv_archive function in python.rs");
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_update_uv_text_with_actual_file() {
        let content = fs::read_to_string("src/python.rs").unwrap();
        let original_lines = content.lines().count();

        const EXPECTED_LINES_REMOVED: usize = 38;

        let updated = update_uv_text(&content, "").unwrap();
        let updated_lines = updated.lines().count();

        assert_eq!(
            updated_lines,
            original_lines - EXPECTED_LINES_REMOVED,
            "Expected line count to decrease by exactly {EXPECTED_LINES_REMOVED} lines (original: {original_lines}, updated: {updated_lines})"
        );
    }
}
@@ -300,7 +300,7 @@ impl BuildStatement<'_> {

         writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
         for (key, value) in self.variables.iter().sorted() {
-            writeln!(buf, "  {key} = {}", value).unwrap();
+            writeln!(buf, "  {key} = {value}").unwrap();
         }
         writeln!(buf).unwrap();

@@ -476,7 +476,7 @@ impl FilesHandle for BuildStatement<'_> {
         let outputs = outputs.into_iter().map(|v| {
             let v = v.as_ref();
             let v = if !v.starts_with("$builddir/") && !v.starts_with("$builddir\\") {
-                format!("$builddir/{}", v)
+                format!("$builddir/{v}")
             } else {
                 v.to_owned()
            };
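Both hunks here are the same mechanical cleanup: moving captured variables into the format string (inlined format args). The two forms are equivalent:

fn demo() {
    let value = "x";
    assert_eq!(format!("{}", value), format!("{value}"));
}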
@@ -162,7 +162,7 @@ impl BuildAction for CargoTest {
         "cargo-nextest",
         CargoInstall {
             binary_name: "cargo-nextest",
-            args: "cargo-nextest --version 0.9.57 --locked",
+            args: "cargo-nextest --version 0.9.99 --locked --no-default-features --features default-no-update",
         },
     )?;
     setup_flags(build)
@@ -19,24 +19,28 @@ use crate::input::BuildInput;
 pub fn node_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
-            sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz",
+            sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12",
         },
         Platform::LinuxArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz",
-            sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz",
+            sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18",
         },
         Platform::MacX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
-            sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz",
+            sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a",
         },
         Platform::MacArm => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz",
-            sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz",
+            sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914",
         },
         Platform::WindowsX64 => OnlineArchive {
-            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip",
-            sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5",
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip",
+            sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85",
         },
+        Platform::WindowsArm => OnlineArchive {
+            url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
+            sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
+        },
     }
 }
@@ -94,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
     }
 }

-fn with_cmd_ext(bin: &str) -> Cow<str> {
+fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{bin}.cmd").into()
     } else {
@@ -21,26 +21,26 @@ pub fn protoc_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-x86_64.zip",
-                sha256: "f90d0dd59065fef94374745627336d622702b67f0319f96cee894d41a974d47a",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-x86_64.zip",
+                sha256: "96553041f1a91ea0efee963cb16f462f5985b4d65365f3907414c360044d8065",
             }
-        }
+        },
         Platform::LinuxArm => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-aarch_64.zip",
-                sha256: "f3d8eb5839d6186392d8c7b54fbeabbb6fcdd90618a500b77cb2e24faa245cad",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-linux-aarch_64.zip",
+                sha256: "6c554de11cea04c56ebf8e45b54434019b1cd85223d4bbd25c282425e306ecc2",
             }
-        }
+        },
         Platform::MacX64 | Platform::MacArm => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-osx-universal_binary.zip",
-                sha256: "e3324d3bc2e9bc967a0bec2472e0ec73b26f952c7c87f2403197414f780c3c6c",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-osx-universal_binary.zip",
+                sha256: "99ea004549c139f46da5638187a85bbe422d78939be0fa01af1aa8ab672e395f",
             }
-        }
-        Platform::WindowsX64 => {
+        },
+        Platform::WindowsX64 | Platform::WindowsArm => {
             OnlineArchive {
-                url: "https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-win64.zip",
-                sha256: "3657053024faa439ff5f8c1dd2ee06bac0f9b9a3d660e99944f015a7451e87ec",
+                url: "https://github.com/protocolbuffers/protobuf/releases/download/v31.1/protoc-31.1-win64.zip",
+                sha256: "70381b116ab0d71cb6a5177d9b17c7c13415866603a0fd40d513dafe32d56c35",
             }
         }
     }
@@ -67,7 +67,7 @@ fn clang_format_archive(platform: Platform) -> OnlineArchive {
                 sha256: "238be68d9478163a945754f06a213483473044f5a004c4125d3d9d8d3556466e",
             }
         }
-        Platform::WindowsX64 => {
+        Platform::WindowsX64 | Platform::WindowsArm => {
             OnlineArchive {
                 url: "https://github.com/ankitects/clang-format-binaries/releases/download/anki-2021-01-09/clang-format_windows_x86_64.zip",
                 sha256: "7d9f6915e3f0fb72407830f0fc37141308d2e6915daba72987a52f309fbeaccc",
@@ -9,6 +9,7 @@ use maplit::hashmap;

 use crate::action::BuildAction;
 use crate::archives::download_and_extract;
+use crate::archives::with_exe;
 use crate::archives::OnlineArchive;
 use crate::archives::Platform;
 use crate::hash::simple_hash;
@@ -16,82 +17,113 @@ use crate::input::BuildInput;
 use crate::inputs;
 use crate::Build;

-/// When updating this, pyoxidizer.bzl needs updating too, but it uses different
-/// files.
-pub fn python_archive(platform: Platform) -> OnlineArchive {
+// To update, run 'cargo run --bin update_uv'.
+// You'll need to do this when bumping Python versions, as uv bakes in
+// the latest known version.
+// When updating Python version, make sure to update version tag in BuildWheel
+// too.
+pub fn uv_archive(platform: Platform) -> OnlineArchive {
     match platform {
         Platform::LinuxX64 => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64_v2-unknown-linux-gnu-install_only.tar.gz",
-                sha256: "9426bca501ae0a257392b10719e2e20ff5fa5e22a3ce4599d6ad0b3139f86417",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz",
+                sha256: "909278eb197c5ed0e9b5f16317d1255270d1f9ea4196e7179ce934d48c4c2545",
             }
-        }
+        },
         Platform::LinuxArm => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-unknown-linux-gnu-install_only.tar.gz",
-                sha256: "7d19e1ecd6e582423f7c74a0c67491eaa982ce9d5c5f35f0e4289f83127abcb8",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-unknown-linux-gnu.tar.gz",
+                sha256: "0b2ad9fe4295881615295add8cc5daa02549d29cc9a61f0578e397efcf12f08f",
             }
-        }
+        },
         Platform::MacX64 => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-apple-darwin-install_only.tar.gz",
-                sha256: "5a0bf895a5cb08d6d008140abb41bb2c8cd638a665273f7d8eb258bc89de439b",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-apple-darwin.tar.gz",
+                sha256: "d785753ac092e25316180626aa691c5dfe1fb075290457ba4fdb72c7c5661321",
             }
-        }
+        },
         Platform::MacArm => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-aarch64-apple-darwin-install_only.tar.gz",
-                sha256: "bf0cd90204a2cc6da48cae1e4b32f48c9f7031fbe1238c5972104ccb0155d368",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz",
+                sha256: "721f532b73171586574298d4311a91d5ea2c802ef4db3ebafc434239330090c6",
             }
-        }
+        },
         Platform::WindowsX64 => {
             OnlineArchive {
-                url: "https://github.com/indygreg/python-build-standalone/releases/download/20240107/cpython-3.9.18+20240107-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
-                sha256: "8f0544cd593984f7ecb90c685931249c579302124b9821064873f3a14ed07005",
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-x86_64-pc-windows-msvc.zip",
+                sha256: "e199b10bef1a7cc540014483e7f60f825a174988f41020e9d2a6b01bd60f0669",
             }
+        },
+        Platform::WindowsArm => {
+            OnlineArchive {
+                url: "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-pc-windows-msvc.zip",
+                sha256: "bb40708ad549ad6a12209cb139dd751bf0ede41deb679ce7513ce197bd9ef234",
+            }
         }
     }
 }

-/// Returns the Python binary, which can be used to create venvs.
-/// Downloads if missing.
-pub fn setup_python(build: &mut Build) -> Result<()> {
-    // if changing this, make sure you remove out/pyenv
-    let python_binary = match env::var("PYTHON_BINARY") {
+pub fn setup_uv(build: &mut Build, platform: Platform) -> Result<()> {
+    let uv_binary = match env::var("UV_BINARY") {
         Ok(path) => {
             assert!(
                 Utf8Path::new(&path).is_absolute(),
-                "PYTHON_BINARY must be absolute"
+                "UV_BINARY must be absolute"
             );
             path.into()
         }
         Err(_) => {
             download_and_extract(
                 build,
-                "python",
-                python_archive(build.host_platform),
+                "uv",
+                uv_archive(platform),
                 hashmap! { "bin" => [
-                    if cfg!(windows) { "python.exe" } else { "bin/python3"}
+                    with_exe("uv")
                 ] },
             )?;
-            inputs![":extract:python:bin"]
+            inputs![":extract:uv:bin"]
         }
     };
-    build.add_dependency("python_binary", python_binary);
+    build.add_dependency("uv_binary", uv_binary);
+
+    // Our macOS packaging needs access to the x86 binary on ARM.
+    if cfg!(target_arch = "aarch64") {
+        download_and_extract(
+            build,
+            "uv_mac_x86",
+            uv_archive(Platform::MacX64),
+            hashmap! { "bin" => [
+                with_exe("uv")
+            ] },
+        )?;
+    }
+    // Our Linux packaging needs access to the ARM binary on x86
+    if cfg!(target_arch = "x86_64") {
+        download_and_extract(
+            build,
+            "uv_lin_arm",
+            uv_archive(Platform::LinuxArm),
+            hashmap! { "bin" => [
+                with_exe("uv")
+            ] },
+        )?;
+    }

     Ok(())
 }

 pub struct PythonEnvironment {
-    pub folder: &'static str,
-    pub base_requirements_txt: BuildInput,
-    pub requirements_txt: BuildInput,
     pub deps: BuildInput,
+    // todo: rename
+    pub venv_folder: &'static str,
+    pub extra_args: &'static str,
     pub extra_binary_exports: &'static [&'static str],
 }

 impl BuildAction for PythonEnvironment {
     fn command(&self) -> &str {
         if env::var("OFFLINE_BUILD").is_err() {
-            "$runner pyenv $python_binary $builddir/$pyenv_folder $system_pkgs $base_requirements $requirements"
+            "$runner pyenv $uv_binary $builddir/$pyenv_folder -- $extra_args"
         } else {
             "echo 'OFFLINE_BUILD is set. Using the existing PythonEnvironment.'"
         }
@@ -99,7 +131,7 @@ impl BuildAction for PythonEnvironment {

     fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
         let bin_path = |binary: &str| -> Vec<String> {
-            let folder = self.folder;
+            let folder = self.venv_folder;
             let path = if cfg!(windows) {
                 format!("{folder}/scripts/{binary}.exe")
             } else {
@@ -108,17 +140,24 @@ impl BuildAction for PythonEnvironment {
             vec![path]
         };

         build.add_inputs("", &self.deps);
+        build.add_variable("pyenv_folder", self.venv_folder);
         if env::var("OFFLINE_BUILD").is_err() {
-            build.add_inputs("python_binary", inputs![":python_binary"]);
-            build.add_variable("pyenv_folder", self.folder);
-            build.add_inputs("base_requirements", &self.base_requirements_txt);
-            build.add_inputs("requirements", &self.requirements_txt);
-            build.add_outputs_ext("pip", bin_path("pip"), true);
+            build.add_inputs("uv_binary", inputs![":uv_binary"]);
+
+            // Add --python flag to extra_args if PYTHON_BINARY is set
+            let mut args = self.extra_args.to_string();
+            if let Ok(python_binary) = env::var("PYTHON_BINARY") {
+                args = format!("--python {python_binary} {args}");
+            }
+            build.add_variable("extra_args", args);
         }

         build.add_outputs_ext("bin", bin_path("python"), true);
         for binary in self.extra_binary_exports {
             build.add_outputs_ext(*binary, bin_path(binary), true);
         }
+        build.add_output_stamp(format!("{}/.stamp", self.venv_folder));
     }

     fn check_output_timestamps(&self) -> bool {
@@ -154,31 +193,19 @@ impl BuildAction for PythonTypecheck {
 struct PythonFormat<'a> {
     pub inputs: &'a BuildInput,
     pub check_only: bool,
-    pub isort_ini: &'a BuildInput,
 }

 impl BuildAction for PythonFormat<'_> {
     fn command(&self) -> &str {
-        "$black -t py39 -q $check --color $in && $
-        $isort --color --settings-path $isort_ini $check $in"
+        "$ruff format $mode $in && $ruff check --select I --fix $in"
     }

     fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
         build.add_inputs("in", self.inputs);
-        build.add_inputs("black", inputs![":pyenv:black"]);
-        build.add_inputs("isort", inputs![":pyenv:isort"]);
+        build.add_inputs("ruff", inputs![":pyenv:ruff"]);

         let hash = simple_hash(self.inputs);
-        build.add_env_var("BLACK_CACHE_DIR", "out/python/black.cache.{hash}");
-        build.add_inputs("isort_ini", self.isort_ini);
-        build.add_variable(
-            "check",
-            if self.check_only {
-                "--diff --check"
-            } else {
-                ""
-            },
-        );
+        build.add_variable("mode", if self.check_only { "--check" } else { "" });

         build.add_output_stamp(format!(
             "tests/python_format.{}.{hash}",
@ -188,13 +215,11 @@ impl BuildAction for PythonFormat<'_> {
|
|||
}
|
||||
|
||||
pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> {
|
||||
let isort_ini = &inputs![".isort.cfg"];
|
||||
build.add_action(
|
||||
format!("check:format:python:{group}"),
|
||||
PythonFormat {
|
||||
inputs: &inputs,
|
||||
check_only: true,
|
||||
isort_ini,
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@@ -203,34 +228,39 @@ pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Resu
        PythonFormat {
            inputs: &inputs,
            check_only: false,
            isort_ini,
        },
    )?;
    Ok(())
}

pub struct PythonLint {
pub struct RuffCheck {
    pub folders: &'static [&'static str],
    pub pylint_ini: BuildInput,
    pub deps: BuildInput,
    pub check_only: bool,
}

impl BuildAction for PythonLint {
impl BuildAction for RuffCheck {
    fn command(&self) -> &str {
        "$pylint --rcfile $pylint_ini -sn -j $cpus $folders"
        "$ruff check $folders $mode"
    }

    fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
        build.add_inputs("", &self.deps);
        build.add_inputs("pylint", inputs![":pyenv:pylint"]);
        build.add_inputs("pylint_ini", &self.pylint_ini);
        build.add_inputs("", inputs![".ruff.toml"]);
        build.add_inputs("ruff", inputs![":pyenv:ruff"]);
        build.add_variable("folders", self.folders.join(" "));
        // On a 16 core system, values above 10 do not improve wall clock time,
        // but waste extra cores that could be working on other tests.
        build.add_variable("cpus", num_cpus::get().min(10).to_string());
        build.add_variable(
            "mode",
            if self.check_only {
                ""
            } else {
                "--fix --unsafe-fixes"
            },
        );

        let hash = simple_hash(&self.deps);
        build.add_output_stamp(format!("tests/python_lint.{hash}"));
        let kind = if self.check_only { "check" } else { "fix" };
        build.add_output_stamp(format!("tests/python_ruff.{kind}.{hash}"));
    }
}
@@ -30,12 +30,12 @@ impl Build {
        )
        .unwrap();
        for (key, value) in &self.variables {
            writeln!(&mut buf, "{} = {}", key, value).unwrap();
            writeln!(&mut buf, "{key} = {value}").unwrap();
        }
        buf.push('\n');

        for (key, value) in &self.pools {
            writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap();
            writeln!(&mut buf, "pool {key}\n depth = {value}").unwrap();
        }
        buf.push('\n');
@@ -15,7 +15,6 @@ camino.workspace = true
clap.workspace = true
flate2.workspace = true
junction.workspace = true
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
sha2.workspace = true
tar.workspace = true
termcolor.workspace = true

@@ -24,3 +23,9 @@ which.workspace = true
xz2.workspace = true
zip.workspace = true
zstd.workspace = true

[target.'cfg(windows)'.dependencies]
reqwest = { workspace = true, features = ["native-tls"] }

[target.'cfg(not(windows))'.dependencies]
reqwest = { workspace = true, features = ["rustls-tls", "rustls-tls-native-roots"] }
@@ -65,7 +65,7 @@ fn sha2_data(data: &[u8]) -> String {
    let mut digest = sha2::Sha256::new();
    digest.update(data);
    let result = digest.finalize();
    format!("{:x}", result)
    format!("{result:x}")
}

enum CompressionKind {
@@ -67,7 +67,10 @@ pub fn run_build(args: BuildArgs) {
            "MYPY_CACHE_DIR",
            build_root.join("tests").join("mypy").into_string(),
        )
        .env("PYTHONPYCACHEPREFIX", build_root.join("pycache"))
        .env(
            "PYTHONPYCACHEPREFIX",
            std::path::absolute(build_root.join("pycache")).unwrap(),
        )
        // commands will not show colors by default, as we do not provide a tty
        .env("FORCE_COLOR", "1")
        .env("MYPY_FORCE_COLOR", "1")

@@ -135,7 +138,7 @@ fn setup_build_root() -> Utf8PathBuf {
        true
    };
    if create {
        println!("Switching build root to {}", new_target);
        println!("Switching build root to {new_target}");
        std::os::unix::fs::symlink(new_target, build_root).unwrap();
    }
}
@@ -1,62 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::env;
use std::fs;
use std::process::Command;

use camino::Utf8PathBuf;
use clap::Args;

use crate::run::run_command;

#[derive(Args, Debug)]
pub struct BuildArtifactsArgs {
    bundle_root: Utf8PathBuf,
    pyoxidizer_bin: String,
}

pub fn build_artifacts(args: BuildArtifactsArgs) {
    // build.rs doesn't declare inputs from venv, so we need to force a rebuild to
    // ensure changes to our libs/the venv get included
    let artifacts = args.bundle_root.join("artifacts");
    if artifacts.exists() {
        fs::remove_dir_all(&artifacts).unwrap();
    }
    let bundle_root = args.bundle_root.canonicalize_utf8().unwrap();
    let build_folder = bundle_root.join("build");
    if build_folder.exists() {
        fs::remove_dir_all(&build_folder).unwrap();
    }

    run_command(
        Command::new(&args.pyoxidizer_bin)
            .args([
                "--system-rust",
                "run-build-script",
                "qt/bundle/build.rs",
                "--var",
                "venv",
                "out/bundle/pyenv",
                "--var",
                "build",
                build_folder.as_str(),
            ])
            .env("CARGO_MANIFEST_DIR", "qt/bundle")
            .env("CARGO_TARGET_DIR", "out/bundle/rust")
            .env("PROFILE", "release")
            .env("OUT_DIR", &artifacts)
            .env("TARGET", env!("TARGET"))
            .env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
            .env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
            .env("CARGO_BUILD_TARGET", env!("TARGET")),
    );
}

pub fn macos_deployment_target() -> &'static str {
    if env!("TARGET") == "x86_64-apple-darwin" {
        "10.13.4"
    } else {
        "11"
    }
}
@@ -1,53 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::process::Command;

use anki_process::CommandExt;
use camino::Utf8Path;
use camino::Utf8PathBuf;

use super::artifacts::macos_deployment_target;
use crate::run::run_command;

pub fn build_bundle_binary() {
    let mut features = String::from("build-mode-prebuilt-artifacts");
    if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
        features.push_str(",global-allocator-jemalloc,allocator-jemalloc");
    }

    let mut command = Command::new("cargo");
    command
        .args([
            "build",
            "--manifest-path=qt/bundle/Cargo.toml",
            "--target-dir=out/bundle/rust",
            "--release",
            "--no-default-features",
        ])
        .arg(format!("--features={features}"))
        .env(
            "DEFAULT_PYTHON_CONFIG_RS",
            // included in main.rs, so relative to qt/bundle/src
            "../../../out/bundle/artifacts/",
        )
        .env(
            "PYO3_CONFIG_FILE",
            Utf8Path::new("out/bundle/artifacts/pyo3-build-config-file.txt")
                .canonicalize_utf8()
                .unwrap(),
        )
        .env("MACOSX_DEPLOYMENT_TARGET", macos_deployment_target())
        .env("SDKROOT", "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk")
        .env("CARGO_BUILD_TARGET", env!("TARGET"));
    if env!("TARGET") == "x86_64-apple-darwin" {
        let xcode_path = Command::run_with_output(["xcode-select", "-p"]).unwrap();
        let ld_classic = Utf8PathBuf::from(xcode_path.stdout.trim())
            .join("Toolchains/XcodeDefault.xctoolchain/usr/bin/ld-classic");
        if ld_classic.exists() {
            // work around XCode 15's default linker not supporting macOS 10.15-12.
            command.env("RUSTFLAGS", format!("-Clink-arg=-fuse-ld={ld_classic}"));
        }
    }
    run_command(&mut command);
}
@@ -1,156 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::env;
use std::fs;
use std::process::Command;

use camino::Utf8Path;
use camino::Utf8PathBuf;
use clap::Args;
use clap::ValueEnum;

use crate::paths::absolute_msys_path;
use crate::paths::unix_path;
use crate::run::run_command;

#[derive(Clone, Copy, ValueEnum, Debug)]
enum DistKind {
    Standard,
    Alternate,
}

#[derive(Args, Debug)]
pub struct BuildDistFolderArgs {
    kind: DistKind,
    folder_root: Utf8PathBuf,
}

pub fn build_dist_folder(args: BuildDistFolderArgs) {
    let BuildDistFolderArgs { kind, folder_root } = args;
    fs::create_dir_all(&folder_root).unwrap();
    // Start with Qt, as it's the largest, and we use --delete to ensure there are
    // no stale files in lib/. Skipped on macOS as Qt is handled later.
    if !cfg!(target_os = "macos") {
        copy_qt_from_venv(kind, &folder_root);
    }
    clean_top_level_files(&folder_root);
    copy_binary_and_pylibs(&folder_root);
    if cfg!(target_os = "linux") {
        copy_linux_extras(kind, &folder_root);
    } else if cfg!(windows) {
        copy_windows_extras(&folder_root);
    }
    fs::write(folder_root.with_extension("stamp"), b"").unwrap();
}

fn copy_qt_from_venv(kind: DistKind, folder_root: &Utf8Path) {
    let python39 = if cfg!(windows) { "" } else { "python3.9/" };
    let qt_root = match kind {
        DistKind::Standard => {
            folder_root.join(format!("../pyenv/lib/{python39}site-packages/PyQt6"))
        }
        DistKind::Alternate => {
            folder_root.join(format!("../pyenv-qt5/lib/{python39}site-packages/PyQt5"))
        }
    };
    let src_path = absolute_msys_path(&qt_root);
    let lib_path = folder_root.join("lib");
    fs::create_dir_all(&lib_path).unwrap();
    let dst_path = with_slash(absolute_msys_path(&lib_path));
    run_command(Command::new("rsync").args([
        "-a",
        "--delete",
        "--exclude-from",
        "qt/bundle/qt.exclude",
        &src_path,
        &dst_path,
    ]));
}

fn copy_linux_extras(kind: DistKind, folder_root: &Utf8Path) {
    // add README, installer, etc
    run_command(Command::new("rsync").args(["-a", "qt/bundle/lin/", &with_slash(folder_root)]));

    // add extra IME plugins from download
    let lib_path = folder_root.join("lib");
    let src_path = folder_root
        .join("../../extracted/linux_qt_plugins")
        .join(match kind {
            DistKind::Standard => "qt6",
            DistKind::Alternate => "qt5",
        });
    let dst_path = lib_path.join(match kind {
        DistKind::Standard => "PyQt6/Qt6/plugins",
        DistKind::Alternate => "PyQt5/Qt5/plugins",
    });
    run_command(Command::new("rsync").args(["-a", &with_slash(src_path), &with_slash(dst_path)]));
}

fn copy_windows_extras(folder_root: &Utf8Path) {
    run_command(Command::new("rsync").args([
        "-a",
        "out/extracted/win_amd64_audio/",
        &with_slash(folder_root),
    ]));
}

fn clean_top_level_files(folder_root: &Utf8Path) {
    let mut to_remove = vec![];
    for entry in fs::read_dir(folder_root).unwrap() {
        let entry = entry.unwrap();
        if entry.file_name() == "lib" {
            continue;
        } else {
            to_remove.push(entry.path());
        }
    }
    for path in to_remove {
        if path.is_dir() {
            fs::remove_dir_all(path).unwrap()
        } else {
            fs::remove_file(path).unwrap()
        }
    }
}

fn with_slash<P>(path: P) -> String
where
    P: AsRef<str>,
{
    format!("{}/", path.as_ref())
}

fn copy_binary_and_pylibs(folder_root: &Utf8Path) {
    let binary = folder_root
        .join("../rust")
        .join(env!("TARGET"))
        .join("release")
        .join(if cfg!(windows) { "anki.exe" } else { "anki" });
    let extra_files = folder_root
        .join("../build")
        .join(env!("TARGET"))
        .join("release/resources/extra_files");
    run_command(Command::new("rsync").args([
        "-a",
        "--exclude",
        "PyQt6",
        // misleading, as it misses the GPL PyQt, and our Rust/JS
        // dependencies
        "--exclude",
        "COPYING.txt",
        &unix_path(&binary),
        &with_slash(unix_path(&extra_files)),
        &with_slash(unix_path(folder_root)),
    ]));
    let google_py = if cfg!(windows) {
        folder_root.join("../pyenv/lib/site-packages/google")
    } else {
        folder_root.join("../pyenv/lib/python3.9/site-packages/google")
    };
    run_command(Command::new("rsync").args([
        "-a",
        &unix_path(&google_py),
        &with_slash(unix_path(&folder_root.join("lib"))),
    ]));
}
@@ -7,7 +7,6 @@

mod archive;
mod build;
mod bundle;
mod paths;
mod pyenv;
mod rsync;

@@ -19,11 +18,6 @@ use archive::archive_command;
use archive::ArchiveArgs;
use build::run_build;
use build::BuildArgs;
use bundle::artifacts::build_artifacts;
use bundle::artifacts::BuildArtifactsArgs;
use bundle::binary::build_bundle_binary;
use bundle::folder::build_dist_folder;
use bundle::folder::BuildDistFolderArgs;
use clap::Parser;
use clap::Subcommand;
use pyenv::setup_pyenv;

@@ -48,9 +42,6 @@ enum Command {
    Rsync(RsyncArgs),
    Run(RunArgs),
    Build(BuildArgs),
    BuildArtifacts(BuildArtifactsArgs),
    BuildBundleBinary,
    BuildDistFolder(BuildDistFolderArgs),
    #[clap(subcommand)]
    Archive(ArchiveArgs),
}

@@ -62,9 +53,6 @@ fn main() -> Result<()> {
        Command::Rsync(args) => rsync_files(args),
        Command::Yarn(args) => setup_yarn(args),
        Command::Build(args) => run_build(args),
        Command::BuildArtifacts(args) => build_artifacts(args),
        Command::BuildBundleBinary => build_bundle_binary(),
        Command::BuildDistFolder(args) => build_dist_folder(args),
        Command::Archive(args) => archive_command(args)?,
    };
    Ok(())
@@ -16,8 +16,3 @@ pub fn absolute_msys_path(path: &Utf8Path) -> String {
    // and \ -> /
    format!("/{drive}/{}", path[7..].replace('\\', "/"))
}

/// Converts backslashes to forward slashes
pub fn unix_path(path: &Utf8Path) -> String {
    path.as_str().replace('\\', "/")
}
@@ -1,6 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::fs;
use std::process::Command;

use camino::Utf8Path;

@@ -10,12 +11,10 @@ use crate::run::run_command;

#[derive(Args)]
pub struct PyenvArgs {
    python_bin: String,
    uv_bin: String,
    pyenv_folder: String,
    initial_reqs: String,
    reqs: Vec<String>,
    #[arg(long, allow_hyphen_values(true))]
    venv_args: Vec<String>,
    #[arg(trailing_var_arg = true)]
    extra_args: Vec<String>,
}

/// Set up a venv if one doesn't already exist, and then sync packages with

@@ -23,35 +22,32 @@ pub struct PyenvArgs {
pub fn setup_pyenv(args: PyenvArgs) {
    let pyenv_folder = Utf8Path::new(&args.pyenv_folder);

    let pyenv_bin_folder = pyenv_folder.join(if cfg!(windows) { "scripts" } else { "bin" });
    let pyenv_python = pyenv_bin_folder.join("python");
    let pip_sync = pyenv_bin_folder.join("pip-sync");

    if !pyenv_python.exists() {
        run_command(
            Command::new(&args.python_bin)
                .args(["-m", "venv"])
                .args(args.venv_args)
                .arg(pyenv_folder),
        );

        if cfg!(windows) {
            // the first install on Windows throws an error the first time pip is upgraded,
            // so we install it twice and swallow the first error
            let _output = Command::new(&pyenv_python)
                .args(["-m", "pip", "install", "-r", &args.initial_reqs])
                .output()
                .unwrap();
    // On first run, ninja creates an empty bin/ folder which breaks the initial
    // install. But we don't want to indiscriminately remove the folder, or
    // macOS Gatekeeper needs to rescan the files each time.
    if pyenv_folder.exists() {
        let cache_tag = pyenv_folder.join("CACHEDIR.TAG");
        if !cache_tag.exists() {
            fs::remove_dir_all(pyenv_folder).expect("Failed to remove existing pyenv folder");
        }

        run_command(Command::new(pyenv_python).args([
            "-m",
            "pip",
            "install",
            "-r",
            &args.initial_reqs,
        ]));
    }

    run_command(Command::new(pip_sync).args(&args.reqs));
    let mut command = Command::new(args.uv_bin);

    // remove UV_* environment variables to avoid interference
    for (key, _) in std::env::vars() {
        if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
            command.env_remove(key);
        }
    }

    run_command(
        command
            .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
            .args(["sync", "--locked", "--no-config"])
            .args(args.extra_args),
    );

    // Write empty stamp file
    fs::write(pyenv_folder.join(".stamp"), "").expect("Failed to write stamp file");
}
@@ -1,7 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::io::ErrorKind;
use std::process::Command;

use anki_io::create_dir_all;

@@ -44,7 +43,7 @@ fn split_env(s: &str) -> Result<(String, String), std::io::Error> {
    if let Some((k, v)) = s.split_once('=') {
        Ok((k.into(), v.into()))
    } else {
        Err(std::io::Error::new(ErrorKind::Other, "invalid env var"))
        Err(std::io::Error::other("invalid env var"))
    }
}

@@ -84,7 +83,7 @@ fn split_args(args: Vec<String>) -> Vec<Vec<String>> {

pub fn run_command(command: &mut Command) {
    if let Err(err) = command.ensure_success() {
        println!("{}", err);
        println!("{err}");
        std::process::exit(1);
    }
}
@@ -28,7 +28,11 @@ pub fn setup_yarn(args: YarnArgs) {
            .arg("--ignore-scripts"),
    );
} else {
    run_command(Command::new(&args.yarn_bin).arg("install"));
    run_command(
        Command::new(&args.yarn_bin)
            .arg("install")
            .arg("--immutable"),
    );
}

std::fs::write(args.stamp, b"").unwrap();
@@ -1,4 +1,4 @@
[toolchain]
channel = "nightly-2023-09-02"
channel = "nightly-2025-03-20"
profile = "minimal"
components = ["rustfmt"]
cargo/licenses.json (8123 lines changed): file diff suppressed because it is too large.
@@ -85,7 +85,7 @@ When formatting issues are reported, they can be fixed with
./ninja format
```

## Fixing eslint/copyright header issues
## Fixing ruff/eslint/copyright header issues

```
./ninja fix
@@ -190,13 +190,10 @@ in the collection2.log file will also be printed on stdout.

If ANKI_PROFILE_CODE is set, Python profiling data will be written on exit.

# Binary Bundles
# Installer/launcher

Anki's official binary packages are created with `./ninja bundle`. The bundling
process was created specifically for the official builds, and is provided as-is;
we are unfortunately not able to provide assistance with any issues you may run
into when using it. You'll need to run
`git submodule update --checkout qt/bundle/PyOxidizer` first.
- The anki-release package is created/published with the scripts in qt/release.
- The installer/launcher is created with the build scripts in qt/launcher/{platform}.

## Mixing development and study
@@ -1,35 +1,78 @@
# This Dockerfile uses three stages.
#   1. Compile anki (and dependencies) and build python wheels.
#   2. Create a virtual environment containing anki and its dependencies.
#   3. Create a final image that only includes anki's virtual environment and required
#      system packages.
# This is a user-contributed Dockerfile. No official support is available.

ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive"

# Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
    > /usr/local/bin/bazel \
    && chmod +x /usr/local/bin/bazel \
    # Bazel expects /usr/bin/python
    && ln -s /usr/local/bin/python /usr/bin/python
FROM ubuntu:24.04 AS build
WORKDIR /opt/anki
COPY . .
# Build python wheels.
ENV PYTHON_VERSION="3.13"

# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    git \
    build-essential \
    pkg-config \
    libssl-dev \
    libbz2-dev \
    libreadline-dev \
    libsqlite3-dev \
    libffi-dev \
    zlib1g-dev \
    liblzma-dev \
    ca-certificates \
    ninja-build \
    rsync \
    libglib2.0-0 \
    libgl1 \
    libx11-6 \
    libxext6 \
    libxrender1 \
    libxkbcommon0 \
    libxkbcommon-x11-0 \
    libxcb1 \
    libxcb-render0 \
    libxcb-shm0 \
    libxcb-icccm4 \
    libxcb-image0 \
    libxcb-keysyms1 \
    libxcb-randr0 \
    libxcb-shape0 \
    libxcb-xfixes0 \
    libxcb-xinerama0 \
    libxcb-xinput0 \
    libsm6 \
    libice6 \
    && rm -rf /var/lib/apt/lists/*

# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"

# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
    && ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"

RUN uv python install ${PYTHON_VERSION} --default

COPY . .

RUN ./tools/build


# Install pre-compiled Anki.
FROM python:${PYTHON_VERSION}-slim as installer
FROM python:3.13-slim AS installer
WORKDIR /opt/anki/
COPY --from=build /opt/anki/wheels/ wheels/
COPY --from=build /opt/anki/out/wheels/ wheels/
# Use virtual environment.
RUN python -m venv venv \
    && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
    && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl


# We use another build stage here so we don't include the wheels in the final image.
FROM python:${PYTHON_VERSION}-slim as final
FROM python:3.13-slim AS final
COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies.

@@ -59,9 +102,9 @@ RUN apt-get update \
    libxrender1 \
    libxtst6 \
    && rm -rf /var/lib/apt/lists/*

# Add non-root user.
RUN useradd --create-home anki
USER anki
WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
docs/language_bridge.md (new file, 84 lines)
@@ -0,0 +1,84 @@
Anki's codebase uses three layers.

1. The web frontend, created in Svelte and TypeScript,
2. The Python layer, and
3. The core Rust layer.

Each layer can make RPCs (Remote Procedure Calls) to the layers below it. While it should be avoided, Python can also invoke TypeScript functions. The Rust layer never makes calls to the other layers. Note that it can make RPCs to AnkiWeb and other servers, which is out of scope of this document.

In this document we'll provide examples of bridges between languages, explaining:

- where the RPC is declared,
- where it is called (with the appropriate imports), and
- where it is implemented.

Imitating those examples should allow you to make calls and create new RPCs.

## Declaring RPCs

Let's consider the method `NewDeck` of `DecksService`. It's declared in [decks.proto](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/decks.proto#L14) as `rpc NewDeck(generic.Empty) returns (Deck);`. This means the method takes no argument (technically, an argument containing no information), and returns a [`Deck`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/decks.proto#L54).

Read [protobuf](./protobuf.md) to learn more about how those input and output types are defined.

If the RPC implementation is in Python, it should be declared in the `FrontendService` service in [frontend.proto](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/frontend.proto#L24C3-L24C66). RPCs declared in any other service are implemented in Rust.

## Making a Remote Procedure Call

In this section we'll consider how to make Remote Procedure Calls (RPCs) from the languages used in Anki. The languages used for AnkiDroid and AnkiMobile are out of scope of this document.

### Making an RPC from Python

Python can invoke the `NewDeck` method with [`col._backend.new_deck()`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/pylib/anki/decks.py#L168). This Python method takes no argument and returns a `Deck` value.

However, most Python code should not call this method directly. Instead it should call [`col.decks.new_deck()`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/pylib/anki/decks.py#L166). Generally speaking, all back-end functions called from Python should be called through a helper method defined in `pylib/anki/`. The `_backend` part is an implementation detail that most callers should ignore. This is especially important because add-ons should expect a relatively stable API independent of the implementation details of the RPC.
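
As a rough sketch of that convention (assuming an already-open collection; the path and the deck name below are made up for illustration, only `col.decks.new_deck()` and `col._backend.new_deck()` come from the linked code):

```python
from anki.collection import Collection

# Hypothetical collection path, for illustration only.
col = Collection("/path/to/collection.anki2")

# Preferred: go through the pylib helper...
deck = col.decks.new_deck()
deck.name = "Japanese::Vocabulary"

# ...rather than reaching for the private backend wrapper it delegates to:
# deck = col._backend.new_deck()
```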

### Invoking a method from TypeScript

Let's consider the method [`rpc GetCsvMetadata(CsvMetadataRequest) returns (CsvMetadata);`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/import_export.proto#L20) from `ImportExportService`.

It's used in the TypeScript class [`ImportCsvState`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/ts/routes/import-csv/lib.ts#L102), as an asynchronous function. Its argument is a single JavaScript object, whose keys are as in [`CsvMetadataRequest`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/proto/anki/import_export.proto#L138), and it returns a `CsvMetadata`.

The method was imported with `import { getCsvMetadata } from "@generated/backend";` and the types were imported with `import type { CsvMetadata } from "@generated/anki/import_export_pb";`. Note that it was not necessary to import the input type, given that it's simply an untyped JavaScript object.

## Implementation

Let's now look at implementations of those RPCs.

### Implementation in Rust

The method `NewDeck` is implemented in Rust's [DecksService](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/rslib/src/decks/service.rs#L21) as `fn new_deck(&mut self) -> error::Result<anki_proto::decks::Deck>`. It should be noted that the method name was changed from Pascal case to snake case, and that the RPC's argument of type `generic.Empty` is ignored.

### Implementation in Python

Let's consider the implementation of the method [DeckOptionsRequireClose](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/mediasrv.py#L578). It's defined as `def deck_options_require_close() -> bytes:`. A return value is required here, but it is ignored, so returning `b""` is perfectly fine.

Note that the incoming HTTP request is not processed on the main thread. In order to do any work with the GUI, we should call `aqt.mw.taskman.run_on_main`.
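
A minimal sketch of such a handler follows. Only the `-> bytes` signature and the `run_on_main` dispatch reflect the pattern described above; the GUI work inside the closure is a made-up example:

```python
import aqt


def deck_options_require_close() -> bytes:
    # The HTTP handler runs off the main thread, so any GUI work must be
    # scheduled onto the main thread explicitly.
    def on_main() -> None:
        # Hypothetical GUI side effect, for illustration only.
        aqt.mw.deckBrowser.refresh()

    aqt.mw.taskman.run_on_main(on_main)
    # The RPC plumbing expects a bytes response, but the caller ignores it.
    return b""
```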

## Invoking a TypeScript method from Python

This case should be avoided if possible, as we generally should avoid
calls to the upper layer. Contrary to the previous cases, we don't use
protobuf.

### Calling a TS function

Let's take as an example [`export function getTypedAnswer(): string | null`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/ts/reviewer/index.ts#L35). It's an exported function, and its return type can be encoded in JSON.

It's called in the Reviewer class through [`self.web.evalWithCallback("getTypedAnswer();", self._onTypedAnswer)`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/reviewer.py#L785). The result is then sent to [`_onTypedAnswer`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/reviewer.py#L787).

If no return value is needed, `web.eval` would have been sufficient.

### Calling a Svelte method

Let's now consider the case where the method we want to call is implemented in a Svelte library. Let's take as example [`deckOptionsPendingChanges`](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/ts/routes/deck-options/%5BdeckId%5D/%2Bpage.svelte#L17). We define it with:

```js
globalThis.anki ||= {};
globalThis.anki.methodName = async (): Promise<void> => { body };
```

Note that if the function is asynchronous, you can't directly send the
result to a callback. Instead your function will have to call a post
method that will be sent to Python or Rust.

This method is called in [deckoptions.py](https://github.com/ankitects/anki/blob/acaeee91fa853e4a7a78dcddbb832d009ec3529a/qt/aqt/deckoptions.py#L68) with `self.web.eval("anki.deckOptionsPendingChanges();")`.
@@ -8,7 +8,7 @@ mentioned there no longer apply:
https://forums.ankiweb.net/t/guide-how-to-build-and-run-anki-from-source-with-xubuntu-20-04/12865

You can see a full list of buildtime and runtime requirements by looking at the
[Dockerfiles](../.buildkite/linux/docker/Dockerfile.amd64) used to build the
[Dockerfile](../.buildkite/linux/docker/Dockerfile) used to build the
official releases.

**Ensure some basic tools are installed**:

@@ -51,13 +51,8 @@ Anki requires a recent glibc.

If you are using a distro that uses musl, Anki will not work.

If your glibc version is 2.35+ on AMD64 or 2.39+ on ARM64, you can skip the rest of this section.

If your system has an older glibc, you won't be able to use the PyQt wheels that are
available in pip/PyPI, and will need to use your system-installed PyQt instead.
Your distro will also need to have Python 3.9 or later.

After installing the system libraries (eg:
You can use your system's Qt libraries if they are Qt 6.2 or later, if
you wish. After installing the system libraries (eg:
'sudo apt install python3-pyqt6.qt{quick,webengine} python3-venv pyqt6-dev-tools'),
find the place they are installed (eg '/usr/lib/python3/dist-packages'). On modern Ubuntu, you'll
also need 'sudo apt remove python3-protobuf'. Then before running any commands like './run', tell Anki where

@@ -68,12 +63,6 @@ export PYTHONPATH=/usr/lib/python3/dist-packages
export PYTHON_BINARY=/usr/bin/python3
```

There are a few things to be aware of:

- You should use ./run and not tools/run-qt5\*, even if your system libraries are Qt5.
- If your system libraries are Qt5, when creating an aqt wheel, the wheel will not work
  on Qt6 environments.

## Packaging considerations

Python, node and protoc are downloaded as part of the build. You can optionally define
@@ -1,3 +1,9 @@
ProtoBuf is a format used both to save data in storage and to transmit
data between services. You can think of it as similar to JSON with
schemas, in that you can use basic types, lists, and records, except
that it's usually transmitted and saved in an efficient binary form
rather than in a human-readable way.

# Protocol Buffers

Anki uses [different implementations of Protocol Buffers](./architecture.md#protobuf)

@@ -92,12 +98,6 @@ should preferably be assigned a number between 1 and 15. If a message contains

Protobuf has an official Python implementation with an extensive [reference](https://developers.google.com/protocol-buffers/docs/reference/python-generated).
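
To make the "JSON with schemas" analogy concrete, here is a hedged sketch of using a generated message from Python; the `anki.decks_pb2` module path and the field names are assumptions based on decks.proto, not verified imports:

```python
# Assumed module path for the classes generated from decks.proto.
from anki import decks_pb2

deck = decks_pb2.Deck(id=123, name="Default")

# Messages round-trip through a compact binary encoding rather than text.
data = deck.SerializeToString()
restored = decks_pb2.Deck()
restored.ParseFromString(data)
assert restored.name == "Default"
```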

- Every message used in aqt or pylib must be added to the respective `.pylintrc`
  to avoid failing type checks. The unqualified protobuf message's name must be
  used, not an alias from `collection.py` for example. This should be taken into
  account when choosing a message name in order to prevent skipping typechecking
  a Python class of the same name.

### Typescript

Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers
@@ -1,4 +1,4 @@
FROM rust:1.83.0-alpine3.20 AS builder
FROM rust:1.85.0-alpine3.20 AS builder

ARG ANKI_VERSION

@@ -7,6 +7,7 @@ RUN apk update && apk add --no-cache build-base protobuf && rm -rf /var/cache/ap
RUN cargo install --git https://github.com/ankitects/anki.git \
    --tag ${ANKI_VERSION} \
    --root /anki-server \
    --locked \
    anki-sync-server

FROM alpine:3.21.0

@@ -1,4 +1,4 @@
FROM rust:1.83.0 AS builder
FROM rust:1.85.0 AS builder

ARG ANKI_VERSION

@@ -7,6 +7,7 @@ RUN apt-get update && apt-get install -y build-essential protobuf-compiler && ap
RUN cargo install --git https://github.com/ankitects/anki.git \
    --tag ${ANKI_VERSION} \
    --root /anki-server \
    --locked \
    anki-sync-server

FROM gcr.io/distroless/cc-debian12
@@ -9,7 +9,12 @@ You must be running 64 bit Windows 10, version 1703 or newer.

**Rustup**:

As mentioned in development.md, rustup must be installed. If you're on
ARM Windows, you must set the default target to x86_64-pc-windows-msvc.
ARM Windows and install the ARM64 version of rustup, from this project folder,
run

```
rustup target add x86_64-pc-windows-msvc
```

**Visual Studio**:
@@ -1 +1 @@
Subproject commit 0fe0162f4a18e8ef2fbac1d9a33af8e38cf7260e
Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba
@@ -50,6 +50,8 @@ actions-select = Select
actions-shortcut-key = Shortcut key: { $val }
actions-suspend-card = Suspend Card
actions-set-due-date = Set Due Date
actions-toggle-load-balancer = Toggle Load Balancer
actions-grade-now = Grade Now
actions-answer-card = Answer Card
actions-unbury-unsuspend = Unbury/Unsuspend
actions-add-deck = Add Deck

@@ -70,7 +72,6 @@ actions-previous-card-info = Previous Card Info
# input is required before it can be performed. E.g. "Export..." vs. "Delete".
actions-with-ellipsis = { $action }...
actions-fullscreen-unsupported = Full screen mode is not supported for your video driver. Try switching to a different one from the preferences screen.

actions-flag-number = Flag { $number }

## The same translation may be used for two independent actions:

@@ -93,3 +94,4 @@ actions-nothing-to-redo = Nothing to redo
actions-auto-advance = Auto Advance
actions-auto-advance-activated = Auto Advance enabled
actions-auto-advance-deactivated = Auto Advance disabled
actions-processing = Processing...
|
@ -60,7 +60,6 @@ card-templates-this-will-create-card-proceed =
|
|||
}
|
||||
card-templates-type-boxes-warning = Only one typing box per card template is supported.
|
||||
card-templates-restore-to-default = Restore to Default
|
||||
card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default
|
||||
values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
|
||||
card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
|
||||
card-templates-restored-to-default = Note type has been restored to its original state.
|
||||
|
||||
|
|
|
@@ -5,6 +5,11 @@ database-check-card-properties =
        [one] Fixed { $count } invalid card property.
       *[other] Fixed { $count } invalid card properties.
    }
database-check-card-last-review-time-empty =
    { $count ->
        [one] Added last review time to { $count } card.
       *[other] Added last review time to { $count } cards.
    }
database-check-missing-templates =
    { $count ->
        [one] Deleted { $count } card with missing template.
@@ -132,7 +132,7 @@ deck-config-bury-priority-tooltip =
    learning or review cards, and you may see both a review sibling and new sibling in the
    same session.

## Ordering section
## Gather order and sort order of cards

deck-config-ordering-title = Display Order
deck-config-new-gather-priority = New card gather order

@@ -151,12 +151,6 @@ deck-config-new-gather-priority-tooltip-2 =
    `Random notes`: Picks notes at random, then gathers all of its cards.

    `Random cards`: Gathers cards in a random order.
deck-config-new-gather-priority-deck = Deck
deck-config-new-gather-priority-deck-then-random-notes = Deck, then random notes
deck-config-new-gather-priority-position-lowest-first = Ascending position
deck-config-new-gather-priority-position-highest-first = Descending position
deck-config-new-gather-priority-random-notes = Random notes
deck-config-new-gather-priority-random-cards = Random cards
deck-config-new-card-sort-order = New card sort order
deck-config-new-card-sort-order-tooltip-2 =
    `Card type, then order gathered`: Shows cards in order of card type number.

@@ -176,11 +170,6 @@ deck-config-new-card-sort-order-tooltip-2 =
    in order.

    `Random`: Shows cards in a random order.
deck-config-sort-order-card-template-then-random = Card type, then random
deck-config-sort-order-random-note-then-template = Random note, then card type
deck-config-sort-order-random = Random
deck-config-sort-order-template-then-gather = Card type, then order gathered
deck-config-sort-order-gather = Order gathered
deck-config-new-review-priority = New/review order
deck-config-new-review-priority-tooltip = When to show new cards in relation to review cards.
deck-config-interday-step-priority = Interday learning/review order

@@ -190,9 +179,6 @@ deck-config-interday-step-priority-tooltip =
    The review limit is always applied first to interday learning cards, and
    then review cards. This option will control the order the gathered cards are shown in,
    but interday learning cards will always be gathered first.
deck-config-review-mix-mix-with-reviews = Mix with reviews
deck-config-review-mix-show-after-reviews = Show after reviews
deck-config-review-mix-show-before-reviews = Show before reviews
deck-config-review-sort-order = Review sort order
deck-config-review-sort-order-tooltip =
    The default order prioritizes cards that have been waiting longest, so that
@@ -200,22 +186,64 @@ deck-config-review-sort-order-tooltip =
    first. If you have a large backlog that will take more than a few days to
    clear, or wish to see cards in subdeck order, you may find the alternate
    sort orders preferable.
deck-config-sort-order-due-date-then-random = Due date, then random
deck-config-sort-order-due-date-then-deck = Due date, then deck
deck-config-sort-order-deck-then-due-date = Deck, then due date
deck-config-sort-order-ascending-intervals = Ascending intervals
deck-config-sort-order-descending-intervals = Descending intervals
deck-config-sort-order-ascending-ease = Ascending ease
deck-config-sort-order-descending-ease = Descending ease
deck-config-sort-order-ascending-difficulty = Easy cards first
deck-config-sort-order-descending-difficulty = Difficult cards first
deck-config-sort-order-retrievability-ascending = Ascending retrievability
deck-config-sort-order-retrievability-descending = Descending retrievability

deck-config-display-order-will-use-current-deck =
    Anki will use the display order from the deck you
    select to study, and not any subdecks it may have.

## Gather order and sort order of cards – Combobox entries

# Gather new cards ordered by deck.
deck-config-new-gather-priority-deck = Deck
# Gather new cards ordered by deck, then ordered by random notes, ensuring all cards of the same note are grouped together.
deck-config-new-gather-priority-deck-then-random-notes = Deck, then random notes
# Gather new cards ordered by position number, ascending (lowest to highest).
deck-config-new-gather-priority-position-lowest-first = Ascending position
# Gather new cards ordered by position number, descending (highest to lowest).
deck-config-new-gather-priority-position-highest-first = Descending position
# Gather the cards ordered by random notes, ensuring all cards of the same note are grouped together.
deck-config-new-gather-priority-random-notes = Random notes
# Gather new cards randomly.
deck-config-new-gather-priority-random-cards = Random cards
# Sort the cards first by their type, in ascending order (alphabetically), then randomized within each type.
deck-config-sort-order-card-template-then-random = Card type, then random
# Sort the notes first randomly, then the cards by their type, in ascending order (alphabetically), within each note.
deck-config-sort-order-random-note-then-template = Random note, then card type
# Sort the cards randomly.
deck-config-sort-order-random = Random
# Sort the cards first by their type, in ascending order (alphabetically), then by the order they were gathered, in ascending order (oldest to newest).
deck-config-sort-order-template-then-gather = Card type, then order gathered
# Sort the cards by the order they were gathered, in ascending order (oldest to newest).
deck-config-sort-order-gather = Order gathered
# How new cards or interday learning cards are mixed with review cards.
deck-config-review-mix-mix-with-reviews = Mix with reviews
# How new cards or interday learning cards are mixed with review cards.
deck-config-review-mix-show-after-reviews = Show after reviews
# How new cards or interday learning cards are mixed with review cards.
deck-config-review-mix-show-before-reviews = Show before reviews
# Sort the cards first by due date, in ascending order (oldest due date to newest), then randomly within the same due date.
deck-config-sort-order-due-date-then-random = Due date, then random
# Sort the cards first by due date, in ascending order (oldest due date to newest), then by deck within the same due date.
deck-config-sort-order-due-date-then-deck = Due date, then deck
# Sort the cards first by deck, then by due date in ascending order (oldest due date to newest) within the same deck.
deck-config-sort-order-deck-then-due-date = Deck, then due date
# Sort the cards by the interval, in ascending order (shortest to longest).
deck-config-sort-order-ascending-intervals = Ascending intervals
# Sort the cards by the interval, in descending order (longest to shortest).
deck-config-sort-order-descending-intervals = Descending intervals
# Sort the cards by ease, in ascending order (lowest to highest ease).
deck-config-sort-order-ascending-ease = Ascending ease
# Sort the cards by ease, in descending order (highest to lowest ease).
deck-config-sort-order-descending-ease = Descending ease
# Sort the cards by difficulty, in ascending order (easiest to hardest).
deck-config-sort-order-ascending-difficulty = Easy cards first
# Sort the cards by difficulty, in descending order (hardest to easiest).
deck-config-sort-order-descending-difficulty = Difficult cards first
# Sort the cards by retrievability percentage, in ascending order (0% to 100%, least retrievable to most easily retrievable).
deck-config-sort-order-retrievability-ascending = Ascending retrievability
# Sort the cards by retrievability percentage, in descending order (100% to 0%, most easily retrievable to least retrievable).
deck-config-sort-order-retrievability-descending = Descending retrievability

## Timer section

deck-config-timer-title = Timers
@@ -279,20 +307,22 @@ deck-config-new-interval-tooltip = The multiplier applied to a review interval w
deck-config-minimum-interval-tooltip = The minimum interval given to a review card after answering `Again`.
deck-config-custom-scheduling = Custom scheduling
deck-config-custom-scheduling-tooltip = Affects the entire collection. Use at your own risk!
# Easy Days section

## Easy Days section.

deck-config-easy-days-title = Easy Days
deck-config-easy-days-monday = Monday
deck-config-easy-days-tuesday = Tuesday
deck-config-easy-days-wednesday = Wednesday
deck-config-easy-days-thursday = Thursday
deck-config-easy-days-friday = Friday
deck-config-easy-days-saturday = Saturday
deck-config-easy-days-sunday = Sunday
deck-config-easy-days-monday = Mon
deck-config-easy-days-tuesday = Tue
deck-config-easy-days-wednesday = Wed
deck-config-easy-days-thursday = Thu
deck-config-easy-days-friday = Fri
deck-config-easy-days-saturday = Sat
deck-config-easy-days-sunday = Sun
deck-config-easy-days-normal = Normal
deck-config-easy-days-reduced = Reduced
deck-config-easy-days-minimum = Minimum
deck-config-easy-days-no-normal-days = At least one day should be set to '{ deck-config-easy-days-normal }'.
deck-config-easy-days-change = Existing reviews will not be rescheduled unless '{ deck-config-reschedule-cards-on-change }' is enabled in the FSRS options.

## Adding/renaming
@@ -342,6 +372,8 @@ deck-config-learning-step-above-graduating-interval = The graduating interval sh
deck-config-good-above-easy = The easy interval should be at least as long as the graduating interval.
deck-config-relearning-steps-above-minimum-interval = The minimum lapse interval should be at least as long as your final relearning step.
deck-config-maximum-answer-secs-above-recommended = Anki can schedule your reviews more efficiently when you keep each question short.
deck-config-too-short-maximum-interval = A maximum interval less than 6 months is not recommended.
deck-config-ignore-before-info = (Approximately) { $included }/{ $totalCards } cards will be used to optimize the FSRS parameters.

## Selecting a deck

@@ -352,8 +384,6 @@ deck-config-which-deck = Which deck would you like to display options for?
deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
deck-config-not-enough-history = Insufficient review history to perform this operation.
deck-config-unable-to-determine-desired-retention =
    Unable to determine a minimum recommended retention.
deck-config-must-have-400-reviews =
    { $count ->
        [one] Only { $count } review was found.
@@ -362,18 +392,18 @@ deck-config-must-have-400-reviews =
# Numbers that control how aggressively the FSRS algorithm schedules cards
deck-config-weights = FSRS parameters
deck-config-compute-optimal-weights = Optimize FSRS parameters
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-optimize-button = Optimize Current Preset
# Indicates that a given function or label, provided via the "text" variable, operates slowly.
deck-config-slow-suffix = { $text } (slow)
deck-config-compute-button = Compute
deck-config-ignore-before = Ignore cards reviewed before
deck-config-time-to-optimize = It's been a while - using the Optimize All button is recommended.
deck-config-time-to-optimize = It's been a while - using the Optimize All Presets button is recommended.
deck-config-evaluate-button = Evaluate
deck-config-desired-retention = Desired retention
deck-config-historical-retention = Historical retention
deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
deck-config-get-params = Get Params
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-complete = { $num }% complete.
deck-config-iterations = Iteration: { $count }...
deck-config-reschedule-cards-on-change = Reschedule cards on change

@@ -391,6 +421,8 @@ deck-config-desired-retention-tooltip =
    less frequently, and you will forget more of them. Be conservative when adjusting this - higher
    values will greatly increase your workload, and lower values can be demoralizing when you forget
    a lot of material.
deck-config-desired-retention-tooltip2 =
    The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
deck-config-historical-retention-tooltip =
    When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
    assume that when you did those old reviews, you remembered 90% of the material. If your old retention
@@ -432,28 +464,27 @@ deck-config-compute-optimal-weights-tooltip2 =
    By default, parameters will be calculated from the review history of all decks using the current preset. You can
    optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
    optimizing the parameters.
deck-config-compute-optimal-retention-tooltip4 =
    This tool will attempt to find the desired retention value
    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.

deck-config-please-save-your-changes-first = Please save your changes first.
deck-config-a-100-day-interval =
    { $days ->
        [one] A 100 day interval will become { $days } day.
       *[other] A 100 day interval will become { $days } days.
    }
deck-config-workload-factor-change = Approximate workload: {$factor}x
    (compared to {$previousDR}% desired retention)
deck-config-workload-factor-unchanged = The higher this value, the more frequently cards will be shown to you.
deck-config-desired-retention-too-low = Your desired retention is very low, which can lead to very long intervals.
deck-config-desired-retention-too-high = Your desired retention is very high, which can lead to very short intervals.

deck-config-percent-of-reviews =
    { $reviews ->
        [one] { $pct }% of { $reviews } review
       *[other] { $pct }% of { $reviews } reviews
    }
deck-config-percent-input = { $pct }%
# This message appears during FSRS parameter optimization.
deck-config-checking-for-improvement = Checking for improvement...
deck-config-optimizing-preset = Optimizing preset { $current_count }/{ $total_count }...
deck-config-fsrs-must-be-enabled = FSRS must be enabled first.
deck-config-fsrs-params-optimal = The FSRS parameters currently appear to be optimal.
deck-config-fsrs-params-no-reviews = No reviews found. Please check that this preset is assigned to all decks you want to optimize (including subdecks) and try again.

deck-config-fsrs-params-no-reviews = No reviews found. Make sure this preset is assigned to all decks (including subdecks) that you want to optimize, and try again.

deck-config-wait-for-audio = Wait for audio
deck-config-show-reminder = Show Reminder
@@ -465,18 +496,63 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear last simulation
deck-config-clear-last-simulate = Clear Last Simulation
deck-config-fsrs-simulator-radio-count = Reviews
deck-config-advanced-settings = Advanced Settings
deck-config-smooth-graph = Smooth graph
deck-config-suspend-leeches = Suspend leeches
deck-config-save-options-to-preset = Save Changes to Preset
deck-config-save-options-to-preset-confirm = Overwrite the options in your current preset with the options that are currently set in the simulator?
# Radio button in the FSRS simulation diagram (Deck options -> FSRS) selecting
# to show the total number of cards that can be recalled or retrieved on a
# specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card

## Messages related to the FSRS scheduler’s health check. The health check determines how well FSRS predictions correlate with your memory. It can be optionally triggered as part of the "Optimize" function.

# Checkbox
deck-config-health-check = Check health when optimizing
# Message box showing the result of the health check
deck-config-fsrs-bad-fit-warning = Health Check:
    Your memory is difficult for FSRS to predict. Recommendations:

    - Suspend or reformulate any cards you constantly forget.
    - Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
    - Understand before you memorize.

    If you follow these suggestions, performance will usually improve over the next few months.
# Message box showing the result of the health check
deck-config-fsrs-good-fit = Health Check:
    FSRS can adapt to your memory well.

## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.

deck-config-unable-to-determine-desired-retention =
    Unable to determine a minimum recommended retention.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-compute-optimal-retention-tooltip4 =
    This tool will attempt to find the desired retention value
    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval =
    { $days ->
        [one] A 100 day interval will become { $days } day.
       *[other] A 100 day interval will become { $days } days.
    }

deck-config-fsrs-simulator-y-axis-title-time = Review Time/Day
deck-config-fsrs-simulator-y-axis-title-count = Review Count/Day
deck-config-fsrs-simulator-y-axis-title-memorized = Memorized Total
@@ -508,6 +584,8 @@ deck-config-compute-optimal-retention-tooltip =
    if it significantly differs from 0.9, it's a sign that the time you've allocated each day is either too low
    or too high for the amount of cards you're trying to learn. This number can be useful as a reference, but it
    is not recommended to copy it into the desired retention field.
deck-config-health-check-tooltip1 = This will show a warning if FSRS struggles to adapt to your memory.
deck-config-health-check-tooltip2 = Health check is performed only when using Optimize Current Preset.

deck-config-compute-optimal-retention = Compute minimum recommended retention
deck-config-predicted-optimal-retention = Minimum recommended retention: { $num }
@@ -5,25 +5,17 @@ decks-create-deck = Create Deck
decks_create_even_if_empty = Create/update this deck even if empty
decks-custom-steps-in-minutes = Custom steps (in minutes)
decks-deck = Deck
decks-decreasing-intervals = Decreasing intervals
decks-delete-deck = Delete Deck
decks-enable-second-filter = Enable second filter
decks-filter = Filter:
decks-filter-2 = Filter 2
decks-get-shared = Get Shared
decks-import-file = Import File
decks-increasing-intervals = Increasing intervals
decks-latest-added-first = Latest added first
decks-limit-to = Limit to
decks-minutes = minutes
decks-most-lapses = Most lapses
decks-new-deck-name = New deck name:
decks-no-deck = [no deck]
decks-oldest-seen-first = Oldest seen first
decks-order-added = Order added
decks-order-due = Order due
decks-please-select-something = Please select something.
decks-random = Random
decks-repeat-failed-cards-after = Repeat failed cards after
# e.g. "Delay for Again", "Delay for Hard", "Delay for Good"
decks-delay-for-button = Delay for { $button }
@@ -37,7 +29,27 @@ decks-learn-header = Learn
decks-review-header = Due
decks-zero-minutes-hint = (0 = return card to original deck)

## Sort order of cards

# Combobox entry: Sort the cards by the date they were added, in ascending order (oldest to newest)
decks-order-added = Order added
# Combobox entry: Sort the cards by the date they were added, in descending order (newest to oldest)
decks-latest-added-first = Latest added first
# Combobox entry: Sort the cards by due date, in ascending order (oldest due date to newest)
decks-order-due = Order due
# Combobox entry: Sort the cards by the number of lapses, in descending order (most lapses to least lapses)
decks-most-lapses = Most lapses
# Combobox entry: Sort the cards by the interval, in ascending order (shortest to longest)
decks-increasing-intervals = Increasing intervals
# Combobox entry: Sort the cards by the interval, in descending order (longest to shortest)
decks-decreasing-intervals = Decreasing intervals
# Combobox entry: Sort the cards by the last review date, in ascending order (oldest seen to newest seen)
decks-oldest-seen-first = Oldest seen first
# Combobox entry: Sort the cards in random order
decks-random = Random

## These strings are no longer used - you do not need to translate them if they
## are not already translated.

# Combobox entry: Sort the cards by relative overdueness, in descending order (most overdue to least overdue)
decks-relative-overdueness = Relative overdueness
@@ -96,6 +96,7 @@ editing-image-occlusion-rectangle-tool = Rectangle
editing-image-occlusion-ellipse-tool = Ellipse
editing-image-occlusion-polygon-tool = Polygon
editing-image-occlusion-text-tool = Text
editing-image-occlusion-fill-tool = Fill with colour
editing-image-occlusion-toggle-mask-editor = Toggle Mask Editor
editing-image-occlusion-reset = Reset Image Occlusion
editing-image-occlusion-confirm-reset = Are you sure you want to reset this image occlusion?
@@ -15,6 +15,7 @@ importing-colon = Colon
importing-comma = Comma
importing-empty-first-field = Empty first field: { $val }
importing-field-separator = Field separator
importing-field-separator-guessed = Field separator (guessed)
importing-field-mapping = Field mapping
importing-field-of-file-is = Field <b>{ $val }</b> of file is:
importing-fields-separated-by = Fields separated by: { $val }

@@ -47,6 +48,7 @@ importing-merge-notetypes-help =
    Warning: This will require a one-way sync, and may mark existing notes as modified.
importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
importing-new-deck-will-be-created = A new deck will be created: { $name }
importing-notes-added-from-file = Notes added from file: { $val }
importing-notes-found-in-file = Notes found in file: { $val }
importing-notes-skipped-as-theyre-already-in = Notes skipped, as up-to-date copies are already in your collection: { $val }

@@ -64,7 +66,6 @@ importing-with-deck-configs-help =
    If enabled, any deck options that the deck sharer included will also be imported.
    Otherwise, all decks will be assigned the default preset.
importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)
importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
# the '|' character
importing-pipe = Pipe
# Warning displayed when the csv import preview table is clipped (some columns were hidden)

@@ -77,7 +78,6 @@ importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } field
importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual.
importing-semicolon = Semicolon
importing-skipped = Skipped
importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
importing-tab = Tab
importing-tag-modified-notes = Tag modified notes:
importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*)
@@ -217,6 +217,9 @@ importing-field-separator-help =
    Please note that if this character appears in any field itself, the field has to be
    quoted according to the CSV standard. Spreadsheet programs like LibreOffice will
    do this automatically.

    It cannot be changed if the text file forces use of a specific separator via a file header.
    If a file header is not present, Anki will try to guess what the separator is.
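For illustration, the quoting rule the help text refers to, sketched with Python's standard csv module (not Anki's importer):

import csv
import io

buf = io.StringIO()
writer = csv.writer(buf, delimiter=";")
# The first field contains the separator, so the writer quotes it per the CSV standard.
writer.writerow(["front; includes separator", "back"])
print(buf.getvalue())  # "front; includes separator";back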
importing-allow-html-in-fields-help =
    Enable this if the file contains HTML formatting. E.g. if the file contains the string
    '<br>', it will appear as a line break on your card. On the other hand, with this

@@ -248,3 +251,5 @@ importing-importing-collection = Importing collection...
importing-unable-to-import-filename = Unable to import { $filename }: file type not supported
importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val }
importing-added = Added
importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
@@ -34,7 +34,7 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text
preferences-default-search-text-example = eg. 'deck:current '
preferences-default-search-text-example = e.g. "deck:current"
preferences-theme = Theme
preferences-theme-follow-system = Follow System
preferences-theme-light = Light

@@ -83,6 +83,15 @@ preferences-ankiweb-intro = AnkiWeb is a free service that lets you keep your fl
preferences-ankihub-intro = AnkiHub provides collaborative deck editing and additional study tools. A paid subscription is required to access certain features.
preferences-third-party-description = Third-party services are unaffiliated with and not endorsed by Anki. Use of these services may require payment.

## URL scheme related
preferences-url-schemes = URL Schemes
preferences-url-scheme-prompt = Allowed URL Schemes (space-separated):
preferences-url-scheme-warning = Blocked attempt to open `{ $link }`, which may be a security issue.

    If you trust the deck author and wish to proceed, you can add `{ $scheme }` to your allowed URL Schemes.
preferences-url-scheme-allow-once = Allow Once
preferences-url-scheme-always-allow = Always Allow

## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.

preferences-basic = Basic
@@ -172,6 +172,11 @@ scheduling-set-due-date-done =
        [one] Set due date of { $cards } card.
       *[other] Set due date of { $cards } cards.
    }
scheduling-graded-cards-done =
    { $cards ->
        [one] Graded { $cards } card.
       *[other] Graded { $cards } cards.
    }
scheduling-forgot-cards =
    { $cards ->
        [one] Reset { $cards } card.
@@ -80,7 +80,7 @@ statistics-reviews =
# This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} memorized
statistics-memorized = {$memorized} cards memorized
statistics-today-title = Today
statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }

@@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards

## True Retention represents your actual retention rate from past reviews, in
## comparison to the "desired retention" parameter of FSRS, which forecasts
## future retention. True Retention is the percentage of all reviewed cards
## Retention represents your actual retention from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts
## future retention. Retention is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period.
##
## Most of these strings are used as column / row headings in a table.

@@ -112,8 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes.

statistics-true-retention-title = True Retention
statistics-true-retention-title = Retention
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range
statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail
@@ -148,7 +149,7 @@ statistics-card-ease-title = Card Ease
statistics-card-difficulty-title = Card Difficulty
statistics-card-stability-title = Card Stability
statistics-card-stability-subtitle = The delay at which retrievability falls to 90%.
statistics-average-stability = Average stability
statistics-median-stability = Median stability
statistics-card-retrievability-title = Card Retrievability
statistics-card-ease-subtitle = The lower the ease, the more frequently a card will appear.
statistics-card-difficulty-subtitle2 = The higher the difficulty, the slower stability will increase.

@@ -228,6 +229,7 @@ statistics-stability-day-single =
# hour range, eg "From 14:00-15:00"
statistics-hours-range = From { $hourStart }:00~{ $hourEnd }:00
statistics-hours-correct = { $correct }/{ $total } correct ({ $percent }%)
statistics-hours-correct-info = → (not 'Again')
# the emoji depicts the graph displaying this number
statistics-hours-reviews = 📊 { $reviews } reviews
# the emoji depicts the graph displaying this number
@@ -254,12 +256,20 @@ statistics-elapsed-time-years = { $amount }y
##

statistics-average-for-days-studied = Average for days studied
# This term is used in a variety of contexts to refer to the total amount of
# items (e.g., cards, mature cards, etc) for a given period, rather than the
# total of all existing items.
statistics-total = Total
statistics-days-studied = Days studied
statistics-average-answer-time-label = Average answer time
statistics-average = Average
statistics-average-interval = Average interval
statistics-median-interval = Median interval
statistics-due-tomorrow = Due tomorrow
# This string, ‘Daily load,’ appears in the ‘Future due’ table and represents a
# forecasted estimate of the number of cards expected to be reviewed daily in
# the future. Unlike the other strings in the table that display actual data
# derived from the current scheduling (e.g., ‘Average’, ‘Due tomorrow’),
# ‘Daily load’ is a projection based on the given data.
statistics-daily-load = Daily load
# eg 5 of 15 (33.3%)
statistics-amount-of-total-with-percentage = { $amount } of { $total } ({ $percent }%)

@@ -279,11 +289,19 @@ statistics-cards-per-day =
        [one] { $count } card/day
       *[other] { $count } cards/day
    }
statistics-average-ease = Average ease
statistics-average-difficulty = Average difficulty
statistics-median-ease = Median ease
statistics-median-difficulty = Median difficulty
statistics-average-retrievability = Average retrievability
statistics-estimated-total-knowledge = Estimated total knowledge
statistics-save-pdf = Save PDF
statistics-saved = Saved.
statistics-stats = stats
statistics-title = Statistics

## These strings are no longer used - you do not need to translate them if they
## are not already translated.

statistics-average-stability = Average stability
statistics-average-interval = Average interval
statistics-average-ease = Average ease
statistics-average-difficulty = Average difficulty
@@ -46,6 +46,20 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
studying-unbury = Unbury
studying-what-would-you-like-to-unbury = What would you like to unbury?
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
studying-card-studied-in-minute =
    { $cards ->
        [one] { $cards } card
       *[other] { $cards } cards
    } studied in
    { $minutes ->
        [one] { $minutes } minute.
       *[other] { $minutes } minutes.
    }
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed

## OBSOLETE; you do not need to translate this

studying-card-studied-in =
    { $count ->
        [one] { $count } card studied in

@@ -56,5 +70,3 @@ studying-minute =
        [one] { $count } minute.
       *[other] { $count } minutes.
    }
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
@@ -50,10 +50,11 @@ sync-account-required =
    A free account is required to keep your collection synchronized. Please <a href="{ $link }">sign up</a> for an account, then enter your details below.
sync-sanity-check-failed = Please use the Check Database function, then sync again. If problems persist, please force a one-way sync in the preferences screen.
sync-clock-off = Unable to sync - your clock is not set to the correct time.
# “details” expands to a string such as “300.14 MB > 300.00 MB”
sync-upload-too-large =
    Your collection file is too large to send to AnkiWeb. You can reduce its
    size by removing any unwanted decks (optionally exporting them first), and
    then using Check Database to shrink the file size down. ({ $details })
    Your collection file is too large to send to AnkiWeb. You can reduce its size by removing any unwanted decks (optionally exporting them first), and then using Check Database to shrink the file size down.

    { $details } (uncompressed)
sync-sign-in = Sign in
sync-ankihub-dialog-heading = AnkiHub Login
sync-ankihub-username-label = Username or Email:
@@ -1,36 +0,0 @@
#!/usr/bin/env python3
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

"""
Tool to extract core strings and keys from .ftl files.
"""

import glob
import json
import os

from fluent.syntax import parse
from fluent.syntax.ast import Junk, Message
from fluent.syntax.serializer import serialize_element

root = ".."
ftl_files = glob.glob(os.path.join(root, "ftl", "core", "*.ftl"), recursive=True)
keys_by_value: dict[str, list[str]] = {}

for path in ftl_files:
    obj = parse(open(path, encoding="utf8").read(), with_spans=False)
    for ent in obj.body:
        if isinstance(ent, Junk):
            raise Exception(f"file had junk! {path} {ent}")
        if isinstance(ent, Message):
            key = ent.id.name
            val = "".join(serialize_element(elem) for elem in ent.value.elements)
            if val in keys_by_value:
                print("duplicate found:", keys_by_value[val], key)
            keys_by_value.setdefault(val, []).append(key)

json.dump(
    keys_by_value, open(os.path.join(root, "keys_by_value.json"), "w", encoding="utf8")
)
print("keys:", len(keys_by_value))
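A hypothetical follow-up showing how the keys_by_value.json emitted above could be inspected for strings shared by multiple keys (only the file name comes from the script itself; the rest is illustrative):

import json

with open("keys_by_value.json", encoding="utf8") as f:
    keys_by_value = json.load(f)

# Values mapped to more than one key are candidates for merging.
for value, keys in keys_by_value.items():
    if len(keys) > 1:
        print(f"{keys} all resolve to: {value!r}")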
@@ -1,99 +0,0 @@
#!/usr/bin/env python3
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

"""
Parse and re-serialize ftl files to get them in a consistent form.
"""

import difflib
import glob
import os
from typing import List

from compare_locales import parser
from compare_locales.checks.fluent import ReferenceMessageVisitor
from compare_locales.paths import File
from fluent.syntax import parse, serialize
from fluent.syntax.ast import Junk


def check_missing_terms(path: str) -> bool:
    "True if file is ok."
    file = File(path, os.path.basename(path))
    content = open(path, "rb").read()
    p = parser.getParser(file.file)
    p.readContents(content)
    refList = p.parse()

    p.readContents(content)
    for e in p.parse():
        ref_data = ReferenceMessageVisitor()
        ref_data.visit(e.entry)

        for attr_or_val, refs in ref_data.entry_refs.items():
            for ref, ref_type in refs.items():
                if ref not in refList:
                    print(f"In {path}:{e}, missing '{ref}'")
                    return False

    return True


def check_file(path: str, fix: bool) -> bool:
    "True if file is ok."
    orig_text = open(path, encoding="utf8").read()
    obj = parse(orig_text, with_spans=False)
    # make sure there's no junk
    for ent in obj.body:
        if isinstance(ent, Junk):
            raise Exception(f"file had junk! {path} {ent}")
    # serialize
    new_text = serialize(obj)
    # make sure serializing did not introduce new junk
    obj = parse(new_text, with_spans=False)
    for ent in obj.body:
        if isinstance(ent, Junk):
            raise Exception(f"file introduced junk! {path} {ent}")

    if new_text == orig_text:
        return check_missing_terms(path)

    if fix:
        print(f"Fixing {path}")
        open(path, "w", newline="\n", encoding="utf8").write(new_text)
        return True
    else:
        print(f"Bad formatting in {path}")
        print(
            "\n".join(
                difflib.unified_diff(
                    orig_text.splitlines(),
                    new_text.splitlines(),
                    fromfile="bad",
                    tofile="good",
                    lineterm="",
                )
            )
        )
        return False


def check_files(files: List[str], fix: bool) -> bool:
    "True if files ok."

    found_bad = False
    for path in files:
        ok = check_file(path, fix)
        if not ok:
            found_bad = True
    return not found_bad


if __name__ == "__main__":
    template_root = os.environ["BUILD_WORKSPACE_DIRECTORY"]
    template_files = glob.glob(
        os.path.join(template_root, "ftl", "*", "*.ftl"), recursive=True
    )

    check_files(template_files, fix=True)
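The parse/serialize/re-parse round-trip in check_file guards against both malformed input and a serializer that itself emits junk. A standalone sketch of the same guard, assuming only the fluent.syntax package already imported above (the sample key and the no-op expectation are illustrative):

from fluent.syntax import parse, serialize
from fluent.syntax.ast import Junk

src = "sample-key = Hello { $name }\n"
ast = parse(src, with_spans=False)
# No Junk entries means the file parsed cleanly.
assert not any(isinstance(e, Junk) for e in ast.body)
# Serializing input that is already canonical should be a no-op.
assert serialize(ast) == src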
@@ -1,14 +0,0 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import glob
import os
import sys

import format

template_root = os.path.dirname(sys.argv[1])
template_files = glob.glob(os.path.join(template_root, "*", "*.ftl"), recursive=True)

if not format.check_files(template_files, fix=False):
    sys.exit(1)

@@ -1 +1 @@
Subproject commit 17216b03db7249600542e388bd4ea124478400e5
Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6
@@ -8,3 +8,5 @@ about-if-you-have-contributed-and-are = If you have contributed and are not on t
about-version = Version { $val }
about-visit-website = <a href='{ $val }'>Visit website</a>
about-written-by-damien-elmes-with-patches = Written by Damien Elmes, with patches, translation, testing and design from:<p>{ $cont }
# appended to the end of the contributor list in the about screen
about-and-others = and others

@@ -1,4 +1,5 @@
qt-accel-about = &About
qt-accel-about-mac = About Anki...
qt-accel-cards = &Cards
qt-accel-check-database = &Check Database
qt-accel-check-media = Check &Media

@@ -40,7 +41,9 @@ qt-accel-layout-horizontal = &Horizontal
qt-accel-zoom-in = Zoom &In
qt-accel-zoom-out = Zoom &Out
qt-accel-reset-zoom = &Reset Zoom
qt-accel-toggle-sidebar = Toggle Sidebar
qt-accel-zoom-editor-in = Zoom Editor &In
qt-accel-zoom-editor-out = Zoom Editor &Out
qt-accel-create-backup = Create &Backup
qt-accel-load-backup = &Revert to Backup
qt-accel-upgrade-downgrade = Upgrade/Downgrade
@@ -3,7 +3,7 @@ qt-misc-addons = Add-ons
qt-misc-all-cards-notes-and-media-for = All cards, notes, and media for this profile will be deleted. Are you sure?
qt-misc-all-cards-notes-and-media-for2 = All cards, notes, and media for the profile "{ $name }" will be deleted. Are you sure?
qt-misc-anki-updatedanki-has-been-released = <h1>Anki Updated</h1>Anki { $val } has been released.<br><br>
qt-misc-automatic-syncing-and-backups-have-been = Automatic syncing and backups have been disabled while restoring. To enable them again, close the profile or restart Anki.
qt-misc-automatic-syncing-and-backups-have-been = Backup successfully restored. Automatic syncing and backups have been disabled for now. To enable them again, close the profile or restart Anki.
qt-misc-back-side-only = Back Side Only
qt-misc-backing-up = Backing Up...
qt-misc-browse = Browse

@@ -73,6 +73,7 @@ qt-misc-second =
qt-misc-layout-auto-enabled = Responsive layout enabled
qt-misc-layout-vertical-enabled = Vertical layout enabled
qt-misc-layout-horizontal-enabled = Horizontal layout enabled
qt-misc-open-anki-launcher = Change to a different Anki version?

## deprecated- these strings will be removed in the future, and do not need
## to be translated
@@ -435,7 +435,7 @@ impl TextWriter {
            item = item.trim_start_matches(' ');
        }

        write!(self.buffer, "{}", item)
        write!(self.buffer, "{item}")
    }

    fn write_char_into_indent(&mut self, ch: char) {

@@ -67,7 +67,7 @@ fn additional_template_folder(dst_folder: &Utf8Path) -> Option<Utf8PathBuf> {

fn all_langs(lang_folder: &Utf8Path) -> Result<Vec<Utf8PathBuf>> {
    std::fs::read_dir(lang_folder)
        .with_context(|| format!("reading {:?}", lang_folder))?
        .with_context(|| format!("reading {lang_folder:?}"))?
        .filter_map(Result::ok)
        .map(|e| Ok(e.path().utf8()?))
        .collect()

@@ -84,6 +84,7 @@ fn ftl_file_from_key(old_key: &str) -> String {
        "deck-config",
        "empty-cards",
        "media-check",
        "qt-misc",
    ] {
        if old_key.starts_with(&format!("{prefix}-")) {
            return format!("{prefix}.ftl");
ninja

@@ -8,7 +8,7 @@ else
    out="$BUILD_ROOT"
fi
export CARGO_TARGET_DIR=$out/rust
export RECONFIGURE_KEY="${MAC_X86};${SOURCEMAP};${HMR}"
export RECONFIGURE_KEY="${MAC_X86};${LIN_ARM64};${SOURCEMAP};${HMR}"

if [ "$SKIP_RUNNER_BUILD" = "1" ]; then
    echo "Runner not rebuilt."
package.json

@@ -19,8 +19,8 @@
    "@poppanator/sveltekit-svg": "^5.0.0",
    "@sqltools/formatter": "^1.2.2",
    "@sveltejs/adapter-static": "^3.0.0",
    "@sveltejs/kit": "^2.8.3",
    "@sveltejs/vite-plugin-svelte": "4.0.0",
    "@sveltejs/kit": "^2.22.2",
    "@sveltejs/vite-plugin-svelte": "5.1",
    "@types/bootstrap": "^5.0.12",
    "@types/codemirror": "^5.60.0",
    "@types/d3": "^7.0.0",

@@ -30,16 +30,16 @@
    "@types/jqueryui": "^1.12.13",
    "@types/lodash-es": "^4.17.4",
    "@types/marked": "^5.0.0",
    "@types/node": "^20",
    "@types/node": "^22",
    "@typescript-eslint/eslint-plugin": "^5.60.1",
    "@typescript-eslint/parser": "^5.60.1",
    "caniuse-lite": "^1.0.30001431",
    "cross-env": "^7.0.2",
    "diff": "^5.0.0",
    "dprint": "^0.47.2",
    "esbuild": "^0.19.10",
    "esbuild-sass-plugin": "^2",
    "esbuild-svelte": "^0.8.1",
    "esbuild": "^0.25.3",
    "esbuild-sass-plugin": "^3.3.1",
    "esbuild-svelte": "^0.9.2",
    "eslint": "^8.44.0",
    "eslint-plugin-compat": "^4.1.4",
    "eslint-plugin-import": "^2.25.4",

@@ -48,16 +48,16 @@
    "prettier": "^3.4.2",
    "prettier-plugin-svelte": "^3.3.2",
    "sass": "<1.77",
    "svelte": "^5.17.3",
    "svelte-check": "^3.4.4",
    "svelte-preprocess": "^5.0.4",
    "svelte": "^5.34.9",
    "svelte-check": "^4.2.2",
    "svelte-preprocess": "^6.0.3",
    "svelte-preprocess-esbuild": "^3.0.1",
    "svgo": "^3.2.0",
    "tslib": "^2.0.3",
    "tsx": "^3.12.0",
    "tsx": "^4.8.1",
    "typescript": "^5.0.4",
    "vite": "^5.4.10",
    "vitest": "^2"
    "vite": "6",
    "vitest": "^3"
  },
  "dependencies": {
    "@bufbuild/protobuf": "^1.2.1",

@@ -75,12 +75,15 @@
    "jquery": "^3.5.1",
    "jquery-ui-dist": "^1.12.1",
    "lodash-es": "^4.17.21",
    "lru-cache": "^10.2.0",
    "marked": "^5.1.0",
    "mathjax": "^3.1.2"
  },
  "resolutions": {
    "canvas": "npm:empty-npm-package@1.0.0",
    "cookie": "0.7.0"
    "cookie": "0.7.0",
    "devalue": "^5.3.2",
    "vite": "6"
  },
  "browserslist": [
    "defaults",
@@ -127,6 +127,7 @@ message RenderCardResponse {
  repeated RenderedTemplateNode answer_nodes = 2;
  string css = 3;
  bool latex_svg = 4;
  bool is_empty = 5;
}

message RenderedTemplateNode {
@@ -50,6 +50,8 @@ message Card {
  optional uint32 original_position = 18;
  optional FsrsMemoryState memory_state = 20;
  optional float desired_retention = 21;
  optional float decay = 22;
  optional int64 last_review_time_secs = 23;
  string custom_data = 19;
}
@@ -19,6 +19,7 @@ service CollectionService {
  rpc MergeUndoEntries(generic.UInt32) returns (OpChanges);
  rpc LatestProgress(generic.Empty) returns (Progress);
  rpc SetWantsAbort(generic.Empty) returns (generic.Empty);
  rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges);
}

// Implicitly includes any of the above methods that are not listed in the
@@ -56,6 +56,7 @@ message ConfigKey {
  RENDER_LATEX = 25;
  LOAD_BALANCER_ENABLED = 26;
  FSRS_SHORT_TERM_WITH_STEPS_ENABLED = 27;
  FSRS_LEGACY_EVALUATE = 28;
}
enum String {
  SET_DUE_BROWSER = 0;
@@ -23,6 +23,10 @@ service DeckConfigService {
  rpc GetDeckConfigsForUpdate(decks.DeckId) returns (DeckConfigsForUpdate);
  rpc UpdateDeckConfigs(UpdateDeckConfigsRequest)
      returns (collection.OpChanges);
  rpc GetIgnoredBeforeCount(GetIgnoredBeforeCountRequest)
      returns (GetIgnoredBeforeCountResponse);
  rpc GetRetentionWorkload(GetRetentionWorkloadRequest)
      returns (GetRetentionWorkloadResponse);
}

// Implicitly includes any of the above methods that are not listed in the

@@ -33,6 +37,25 @@ message DeckConfigId {
  int64 dcid = 1;
}

message GetRetentionWorkloadRequest {
  repeated float w = 1;
  string search = 2;
}

message GetRetentionWorkloadResponse {
  map<uint32, float> costs = 1;
}

message GetIgnoredBeforeCountRequest {
  string ignore_revlogs_before_date = 1;
  string search = 2;
}

message GetIgnoredBeforeCountResponse {
  uint64 included = 1;
  uint64 total = 2;
}

message DeckConfig {
  message Config {
    enum NewCardInsertOrder {

@@ -110,9 +133,10 @@ message DeckConfig {

    repeated float fsrs_params_4 = 3;
    repeated float fsrs_params_5 = 5;
    repeated float fsrs_params_6 = 6;

    // consider saving remaining ones for fsrs param changes
    reserved 6 to 8;
    reserved 7 to 8;

    uint32 new_per_day = 9;
    uint32 reviews_per_day = 10;

@@ -193,6 +217,8 @@ message DeckConfigsForUpdate {
    bool review_today_active = 5;
    // Whether new_today applies to today or a past day.
    bool new_today_active = 6;
    // Deck-specific desired retention override
    optional float desired_retention = 7;
  }
  string name = 1;
  int64 config_id = 2;

@@ -209,6 +235,8 @@ message DeckConfigsForUpdate {
  // only applies to v3 scheduler
  bool new_cards_ignore_review_limit = 7;
  bool fsrs = 8;
  bool fsrs_health_check = 11;
  bool fsrs_legacy_evaluate = 12;
  bool apply_all_parent_limits = 9;
  uint32 days_since_last_fsrs_optimize = 10;
}

@@ -232,4 +260,5 @@ message UpdateDeckConfigsRequest {
  bool fsrs = 8;
  bool apply_all_parent_limits = 9;
  bool fsrs_reschedule = 10;
  bool fsrs_health_check = 11;
}
@@ -83,6 +83,8 @@ message Deck {
  optional uint32 new_limit = 7;
  DayLimit review_limit_today = 8;
  DayLimit new_limit_today = 9;
  // Deck-specific desired retention override
  optional float desired_retention = 10;

  reserved 12 to 15;
}
@@ -27,6 +27,9 @@ service FrontendService {
  rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
  // Warns python that the deck option web view is ready to receive requests.
  rpc deckOptionsReady(generic.Empty) returns (generic.Empty);

  // Save colour picker's custom colour palette
  rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
}

service BackendFrontendService {}
@@ -176,9 +176,12 @@ message CsvMetadata {
  // to determine the number of columns.
  repeated string column_labels = 5;
  oneof deck {
    // id of an existing deck
    int64 deck_id = 6;
    // One-based. 0 means n/a.
    uint32 deck_column = 7;
    // name of new deck to be created
    string deck_name = 17;
  }
  oneof notetype {
    // One notetype for all rows with given column mapping.
@@ -59,7 +59,7 @@ message AddNoteRequest {
}

message AddNoteResponse {
  collection.OpChanges changes = 1;
  collection.OpChangesWithCount changes = 1;
  int64 note_id = 2;
}
@@ -33,6 +33,7 @@ service NotetypesService {
  rpc GetFieldNames(NotetypeId) returns (generic.StringList);
  rpc RestoreNotetypeToStock(RestoreNotetypeToStockRequest)
      returns (collection.OpChanges);
  rpc GetClozeFieldOrds(NotetypeId) returns (GetClozeFieldOrdsResponse);
}

// Implicitly includes any of the above methods that are not listed in the

@@ -242,3 +243,7 @@ enum ClozeField {
  CLOZE_FIELD_TEXT = 0;
  CLOZE_FIELD_BACK_EXTRA = 1;
}

message GetClozeFieldOrdsResponse {
  repeated uint32 ords = 1;
}
@@ -12,6 +12,7 @@ import "anki/cards.proto";
import "anki/decks.proto";
import "anki/collection.proto";
import "anki/config.proto";
import "anki/deck_config.proto";

service SchedulerService {
  rpc GetQueuedCards(GetQueuedCardsRequest) returns (QueuedCards);

@@ -35,6 +36,7 @@ service SchedulerService {
  rpc ScheduleCardsAsNewDefaults(ScheduleCardsAsNewDefaultsRequest)
      returns (ScheduleCardsAsNewDefaultsResponse);
  rpc SetDueDate(SetDueDateRequest) returns (collection.OpChanges);
  rpc GradeNow(GradeNowRequest) returns (collection.OpChanges);
  rpc SortCards(SortCardsRequest) returns (collection.OpChangesWithCount);
  rpc SortDeck(SortDeckRequest) returns (collection.OpChangesWithCount);
  rpc GetSchedulingStates(cards.CardId) returns (SchedulingStates);

@@ -49,11 +51,15 @@ service SchedulerService {
      returns (ComputeFsrsParamsResponse);
  rpc GetOptimalRetentionParameters(GetOptimalRetentionParametersRequest)
      returns (GetOptimalRetentionParametersResponse);
  rpc ComputeOptimalRetention(ComputeOptimalRetentionRequest)
  rpc ComputeOptimalRetention(SimulateFsrsReviewRequest)
      returns (ComputeOptimalRetentionResponse);
  rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
      returns (SimulateFsrsReviewResponse);
  rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
      returns (SimulateFsrsWorkloadResponse);
  rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
  rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
      returns (EvaluateParamsResponse);
  rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
  // The number of days the calculated interval was fuzzed by on the previous
  // review (if any). Utilized by the FSRS add-on.

@@ -237,6 +243,11 @@ message SetDueDateRequest {
  config.OptionalStringConfigKey config_key = 3;
}

message GradeNowRequest {
  repeated int64 card_ids = 1;
  CardAnswer.Rating rating = 2;
}

message SortCardsRequest {
  repeated int64 card_ids = 1;
  uint32 starting_from = 2;

@@ -347,11 +358,13 @@ message ComputeFsrsParamsRequest {
  repeated float current_params = 2;
  int64 ignore_revlogs_before_ms = 3;
  uint32 num_of_relearning_steps = 4;
  bool health_check = 5;
}

message ComputeFsrsParamsResponse {
  repeated float params = 1;
  uint32 fsrs_items = 2;
  optional bool health_check_passed = 3;
}

message ComputeFsrsParamsFromItemsRequest {

@@ -390,6 +403,12 @@ message SimulateFsrsReviewRequest {
  uint32 max_interval = 7;
  string search = 8;
  bool new_cards_ignore_review_limit = 9;
  repeated float easy_days_percentages = 10;
  deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
  optional uint32 suspend_after_lapse_count = 12;
  float historical_retention = 13;
  uint32 learning_step_count = 14;
  uint32 relearning_step_count = 15;
}

message SimulateFsrsReviewResponse {

@@ -399,12 +418,10 @@ message SimulateFsrsReviewResponse {
  repeated float daily_time_cost = 4;
}

message ComputeOptimalRetentionRequest {
  repeated float params = 1;
  uint32 days_to_simulate = 2;
  uint32 max_interval = 3;
  string search = 4;
  double loss_aversion = 5;
message SimulateFsrsWorkloadResponse {
  map<uint32, float> cost = 1;
  map<uint32, float> memorized = 2;
  map<uint32, uint32> review_count = 3;
}

message ComputeOptimalRetentionResponse {

@@ -420,20 +437,25 @@ message GetOptimalRetentionParametersResponse {
  uint32 learn_span = 2;
  float max_cost_perday = 3;
  float max_ivl = 4;
  repeated float learn_costs = 5;
  repeated float review_costs = 6;
  repeated float first_rating_prob = 7;
  repeated float review_rating_prob = 8;
  repeated float first_rating_offsets = 9;
  repeated float first_session_lens = 10;
  float forget_rating_offset = 11;
  float forget_session_len = 12;
  float loss_aversion = 13;
  uint32 learn_limit = 14;
  uint32 review_limit = 15;
  repeated float first_rating_prob = 5;
  repeated float review_rating_prob = 6;
  float loss_aversion = 7;
  uint32 learn_limit = 8;
  uint32 review_limit = 9;
  repeated float learning_step_transitions = 10;
  repeated float relearning_step_transitions = 11;
  repeated float state_rating_costs = 12;
  uint32 learning_step_count = 13;
  uint32 relearning_step_count = 14;
}

message EvaluateParamsRequest {
  string search = 1;
  int64 ignore_revlogs_before_ms = 2;
  uint32 num_of_relearning_steps = 3;
}

message EvaluateParamsLegacyRequest {
  repeated float params = 1;
  string search = 2;
  int64 ignore_revlogs_before_ms = 3;

@@ -447,6 +469,7 @@ message EvaluateParamsResponse {
message ComputeMemoryStateResponse {
  optional cards.FsrsMemoryState state = 1;
  float desired_retention = 2;
  float decay = 3;
}

message FuzzDeltaRequest {
@@ -74,10 +74,15 @@ message SearchNode {
  repeated SearchNode nodes = 1;
  Joiner joiner = 2;
}
enum FieldSearchMode {
  FIELD_SEARCH_MODE_NORMAL = 0;
  FIELD_SEARCH_MODE_REGEX = 1;
  FIELD_SEARCH_MODE_NOCOMBINING = 2;
}
message Field {
  string field_name = 1;
  string text = 2;
  bool is_re = 3;
  FieldSearchMode mode = 3;
}

oneof filter {

@@ -65,6 +65,7 @@ message CardStatsResponse {
  string preset = 21;
  optional string original_deck = 22;
  optional float desired_retention = 23;
  repeated float fsrs_params = 24;
}

message GraphsRequest {