Mirror of https://github.com/ankitects/anki.git

Commit 098ff32187: Merge branch 'main' into exposing_consts
68 changed files with 937 additions and 299 deletions
@@ -16,6 +16,7 @@ if [ "$CLEAR_RUST" = "1" ]; then
     rm -rf $BUILD_ROOT/rust
 fi
 
+rm -f out/build.ninja
 ./ninja pylib qt check
 
 echo "--- Ensure libs importable"
.idea.dist/repo.iml (new file, 13 lines)
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/out/pylib" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/pylib" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/qt" isTestSource="false" />
+      <excludeFolder url="file://$MODULE_DIR$/extra" />
+      <excludeFolder url="file://$MODULE_DIR$/out/pyenv" />
+    </content>
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
@@ -12,8 +12,7 @@
 "command": "tools/ninja.bat",
 "args": [
   "pylib",
-  "qt",
-  "extract:win_amd64_audio"
+  "qt"
 ]
 }
 }
CONTRIBUTORS (14 changed lines)
@@ -189,7 +189,7 @@ Christian Donat <https://github.com/cdonat2>
 Asuka Minato <https://asukaminato.eu.org>
 Dillon Baldwin <https://github.com/DillBal>
 Voczi <https://github.com/voczi>
 Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
 Themis Demetriades <themis100@outlook.com>
 Luke Bartholomew <lukesbart@icloud.com>
 Gregory Abrasaldo <degeemon@gmail.com>
@@ -243,6 +243,18 @@ Lee Doughty <32392044+leedoughty@users.noreply.github.com>
 memchr <memchr@proton.me>
 Max Romanowski <maxr777@proton.me>
 Aldlss <ayaldlss@gmail.com>
+Hanna Nilsén <hanni614@student.liu.se>
+Elias Johansson Lara <elias.johanssonlara@gmail.com>
+Toby Penner <tobypenner01@gmail.com>
+Danilo Spillebeen <spillebeendanilo@gmail.com>
+Matbe766 <matildabergstrom01@gmail.com>
+Amanda Sternberg <mandis.sternberg@gmail.com>
+arold0 <arold0@icloud.com>
+nav1s <nav1s@proton.me>
+Ranjit Odedra <ranjitodedra.dev@gmail.com>
+Eltaurus <https://github.com/Eltaurus-Lt>
+jariji
+Francisco Esteva <fr.esteva@duocuc.cl>
 
 ********************
Cargo.lock (generated, 39 changed lines)
@@ -46,9 +46,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
 [[package]]
 name = "ammonia"
-version = "4.1.1"
+version = "4.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f"
+checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6"
 dependencies = [
  "cssparser",
  "html5ever 0.35.0",
@@ -3555,6 +3555,7 @@ dependencies = [
 name = "launcher"
 version = "1.0.0"
 dependencies = [
+ "anki_i18n",
  "anki_io",
  "anki_process",
  "anyhow",
@@ -3563,6 +3564,7 @@ dependencies = [
  "embed-resource",
  "libc",
  "libc-stdhandle",
+ "locale_config",
  "serde_json",
  "widestring",
  "windows 0.61.3",
@@ -3702,6 +3704,19 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed"
 
+[[package]]
+name = "locale_config"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d2c35b16f4483f6c26f0e4e9550717a2f6575bcd6f12a53ff0c490a94a6934"
+dependencies = [
+ "lazy_static",
+ "objc",
+ "objc-foundation",
+ "regex",
+ "winapi",
+]
+
 [[package]]
 name = "lock_api"
 version = "0.4.13"
@@ -4380,6 +4395,26 @@ dependencies = [
  "malloc_buf",
 ]
 
+[[package]]
+name = "objc-foundation"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9"
+dependencies = [
+ "block",
+ "objc",
+ "objc_id",
+]
+
+[[package]]
+name = "objc_id"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b"
+dependencies = [
+ "objc",
+]
+
 [[package]]
 name = "object"
 version = "0.36.7"
@@ -51,7 +51,7 @@ ninja_gen = { "path" = "build/ninja_gen" }
 unicase = "=2.6.0" # any changes could invalidate sqlite indexes
 
 # normal
-ammonia = "4.1.0"
+ammonia = "4.1.2"
 anyhow = "1.0.98"
 async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
 async-stream = "0.3.6"
@@ -92,6 +92,7 @@ itertools = "0.14.0"
 junction = "1.2.0"
 libc = "0.2"
 libc-stdhandle = "0.1"
+locale_config = "0.3.0"
 maplit = "1.0.2"
 nom = "8.0.0"
 num-format = "0.4.4"
@@ -2226,7 +2226,7 @@
 {
 "authors": "Ibraheem Ahmed <ibraheem@ibraheem.ca>",
 "description": "A high performance, zero-copy URL router.",
-"license": "MIT AND BSD-3-Clause",
+"license": "BSD-3-Clause AND MIT",
 "license_file": null,
 "name": "matchit",
 "repository": "https://github.com/ibraheemdev/matchit"
@@ -4154,7 +4154,7 @@
 {
 "authors": "David Tolnay <dtolnay@gmail.com>",
 "description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31",
-"license": "(MIT OR Apache-2.0) AND Unicode-3.0",
+"license": "(Apache-2.0 OR MIT) AND Unicode-3.0",
 "license_file": null,
 "name": "unicode-ident",
 "repository": "https://github.com/dtolnay/unicode-ident"
@@ -46,10 +46,14 @@ see and install a number of recommended extensions.
 
 ## PyCharm/IntelliJ
 
-If you decide to use PyCharm instead of VS Code, there are somethings to be
-aware of.
-
-### Pylib References
-
-You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a
-sources root, so it knows references to 'anki' in aqt are valid.
+### Setting up Python environment
+
+To make PyCharm recognize `anki` and `aqt` imports, you need to add source paths to _Settings > Project Structure_.
+You can copy the provided .idea.dist directory to set up the paths automatically:
+
+```
+mkdir .idea && cd .idea
+ln -sf ../.idea.dist/* .
+```
+
+You also need to add a new Python interpreter under _Settings > Python > Interpreter_ pointing to the Python executable under `out/pyenv` (available after building Anki).
@@ -1 +1 @@
-Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba
+Subproject commit ec5e4cad6242e538cacf52265243668f0de5da80
@@ -382,7 +382,7 @@ deck-config-which-deck = Which deck would you like to display options for?
 ## Messages related to the FSRS scheduler
 
 deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
-deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
+deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default values.
 deck-config-not-enough-history = Insufficient review history to perform this operation.
 deck-config-must-have-400-reviews =
     { $count ->
ftl/core/launcher.ftl (new file, 38 lines)
@@ -0,0 +1,38 @@
+launcher-title = Anki Launcher
+launcher-press-enter-to-install = Press the Enter/Return key on your keyboard to install or update Anki.
+launcher-press-enter-to-start = Press enter to start Anki.
+launcher-anki-will-start-shortly = Anki will start shortly.
+launcher-you-can-close-this-window = You can close this window.
+launcher-updating-anki = Updating Anki...
+launcher-latest-anki = Install Latest Anki (default)
+launcher-choose-a-version = Choose a version
+launcher-sync-project-changes = Sync project changes
+launcher-keep-existing-version = Keep existing version ({ $current })
+launcher-revert-to-previous = Revert to previous version ({ $prev })
+launcher-allow-betas = Allow betas: { $state }
+launcher-on = on
+launcher-off = off
+launcher-cache-downloads = Cache downloads: { $state }
+launcher-download-mirror = Download mirror: { $state }
+launcher-uninstall = Uninstall Anki
+launcher-invalid-input = Invalid input. Please try again.
+launcher-latest-releases = Latest releases: { $releases }
+launcher-enter-the-version-you-want = Enter the version you want to install:
+launcher-versions-before-cant-be-installed = Versions before 2.1.50 can't be installed.
+launcher-invalid-version = Invalid version.
+launcher-unable-to-check-for-versions = Unable to check for Anki versions. Please check your internet connection.
+launcher-checking-for-updates = Checking for updates...
+launcher-uninstall-confirm = Uninstall Anki's program files? (y/n)
+launcher-uninstall-cancelled = Uninstall cancelled.
+launcher-program-files-removed = Program files removed.
+launcher-remove-all-profiles-confirm = Remove all profiles/cards? (y/n)
+launcher-user-data-removed = User data removed.
+launcher-download-mirror-options = Download mirror options:
+launcher-mirror-no-mirror = No mirror
+launcher-mirror-china = China
+launcher-mirror-disabled = Mirror disabled.
+launcher-mirror-china-enabled = China mirror enabled.
+launcher-beta-releases-enabled = Beta releases enabled.
+launcher-beta-releases-disabled = Beta releases disabled.
+launcher-download-caching-enabled = Download caching enabled.
+launcher-download-caching-disabled = Download caching disabled and cache cleared.
@@ -1 +1 @@
-Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6
+Subproject commit 0b7c530233390d73b706f012bbe7489539925c7d
@@ -20,6 +20,7 @@ service CollectionService {
   rpc LatestProgress(generic.Empty) returns (Progress);
   rpc SetWantsAbort(generic.Empty) returns (generic.Empty);
   rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges);
+  rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse);
 }
 
 // Implicitly includes any of the above methods that are not listed in the
@@ -163,3 +164,7 @@ message CreateBackupRequest {
   bool force = 2;
   bool wait_for_completion = 3;
 }
+
+message GetCustomColoursResponse {
+  repeated string colours = 1;
+}
@@ -37,6 +37,8 @@ message CardStatsResponse {
     uint32 ease = 5;
     float taken_secs = 6;
     optional cards.FsrsMemoryState memory_state = 7;
+    // seconds
+    uint32 last_interval = 8;
   }
   repeated StatsRevlogEntry revlog = 1;
   int64 card_id = 2;
@@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule
 TR = anki._fluent.LegacyTranslationEnum
 FormatTimeSpan = _pb.FormatTimespanRequest
 
+# When adding new languages here, check lang_to_disk_lang() below
 langs = sorted(
     [
         ("Afrikaans", "af_ZA"),
@@ -38,6 +38,7 @@ langs = sorted(
         ("Italiano", "it_IT"),
         ("lo jbobau", "jbo_EN"),
         ("Lenga d'òc", "oc_FR"),
+        ("Қазақша", "kk_KZ"),
         ("Magyar", "hu_HU"),
         ("Nederlands", "nl_NL"),
         ("Norsk", "nb_NO"),
@@ -64,6 +65,7 @@ langs = sorted(
         ("Українська мова", "uk_UA"),
         ("Հայերեն", "hy_AM"),
         ("עִבְרִית", "he_IL"),
+        ("ייִדיש", "yi"),
         ("العربية", "ar_SA"),
         ("فارسی", "fa_IR"),
         ("ภาษาไทย", "th_TH"),
@@ -104,6 +106,7 @@ compatMap = {
     "it": "it_IT",
     "ja": "ja_JP",
     "jbo": "jbo_EN",
+    "kk": "kk_KZ",
     "ko": "ko_KR",
     "la": "la_LA",
     "mn": "mn_MN",
@@ -126,6 +129,7 @@ compatMap = {
     "uk": "uk_UA",
     "uz": "uz_UZ",
     "vi": "vi_VN",
+    "yi": "yi",
 }
 
 
@@ -233,7 +237,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
 
 
 def is_rtl(lang: str) -> bool:
-    return lang in ("he", "ar", "fa", "ug")
+    return lang in ("he", "ar", "fa", "ug", "yi")
 
 
 # strip off unicode isolation markers from a translated string
@@ -32,6 +32,7 @@ def test_find_cards():
     note = col.newNote()
     note["Front"] = "cat"
     note["Back"] = "sheep"
+    note.tags.append("conjunção größte")
     col.addNote(note)
     catCard = note.cards()[0]
     m = col.models.current()
@@ -68,6 +69,8 @@ def test_find_cards():
     col.tags.bulk_remove(col.db.list("select id from notes"), "foo")
     assert len(col.find_cards("tag:foo")) == 0
     assert len(col.find_cards("tag:bar")) == 5
+    assert len(col.find_cards("tag:conjuncao tag:groste")) == 0
+    assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1
     # text searches
     assert len(col.find_cards("cat")) == 2
     assert len(col.find_cards("cat -dog")) == 1
@@ -226,6 +226,7 @@ def show(mw: aqt.AnkiQt) -> QDialog:
             "Anon_0000",
             "Bilolbek Normuminov",
             "Sagiv Marzini",
+            "Zhanibek Rassululy",
         )
     )
@@ -289,6 +289,10 @@ class AddCards(QMainWindow):
     def _add_current_note(self) -> None:
         note = self.editor.note
 
+        # Prevent adding a note that has already been added (e.g., from double-clicking)
+        if note.id != 0:
+            return
+
         if not self._note_can_be_added(note):
             return
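The guard relies on `note.id` being 0 until the collection stores the note and assigns it a real id; a repeated invocation (e.g. from a double-click on the Add button) then sees a nonzero id and bails out. A minimal Python sketch of the idea, with hypothetical `Note`/`add_note` stand-ins rather than Anki's actual API:

```
class Note:
    def __init__(self) -> None:
        self.id = 0  # 0 until the note is stored

added: list[Note] = []

def add_note(note: Note) -> None:
    # A second call with the same note sees a nonzero id
    # and returns before a duplicate is created.
    if note.id != 0:
        return
    added.append(note)
    note.id = len(added)  # the store assigns a real id

n = Note()
add_note(n)
add_note(n)  # ignored: already added
assert len(added) == 1
```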
@@ -521,7 +521,7 @@ class Browser(QMainWindow):
         self.search()
 
     def current_search(self) -> str:
-        return self._line_edit().text()
+        return self._line_edit().text().replace("\n", " ")
 
     def search(self) -> None:
         """Search triggered programmatically. Caller must have saved note first."""
@@ -80,7 +80,7 @@ class SidebarItem:
         self.search_node = search_node
         self.on_expanded = on_expanded
         self.children: list[SidebarItem] = []
-        self.tooltip: str | None = None
+        self.tooltip: str = name
         self._parent_item: SidebarItem | None = None
         self._expanded = expanded
         self._row_in_parent: int | None = None
@@ -85,11 +85,11 @@
      </item>
      <item row="2" column="2">
       <widget class="QSpinBox" name="limit">
-       <property name="maximumSize">
-        <size>
-         <width>60</width>
-         <height>16777215</height>
-        </size>
+       <property name="sizePolicy">
+        <sizepolicy hsizetype="Fixed" vsizetype="Fixed">
+         <horstretch>0</horstretch>
+         <verstretch>0</verstretch>
+        </sizepolicy>
        </property>
        <property name="minimum">
         <number>1</number>
@@ -168,11 +168,11 @@
      </item>
      <item row="1" column="1">
       <widget class="QSpinBox" name="limit_2">
-       <property name="maximumSize">
-        <size>
-         <width>60</width>
-         <height>16777215</height>
-        </size>
+       <property name="sizePolicy">
+        <sizepolicy hsizetype="Fixed" vsizetype="Fixed">
+         <horstretch>0</horstretch>
+         <verstretch>0</verstretch>
+        </sizepolicy>
       </property>
        <property name="minimum">
         <number>1</number>
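Replacing the hard-coded `maximumSize` (width capped at 60 px) with a Fixed size policy lets each spin box size itself from its contents instead of being clipped by the layout. A hedged PyQt6 equivalent of what the .ui change expresses (requires PyQt6; the widget here is illustrative):

```
from PyQt6.QtWidgets import QApplication, QSizePolicy, QSpinBox

app = QApplication([])
spin = QSpinBox()
spin.setMinimum(1)
# Fixed in both directions: the widget stays at its sizeHint(),
# and the layout neither stretches nor shrinks it.
spin.setSizePolicy(QSizePolicy.Policy.Fixed, QSizePolicy.Policy.Fixed)
print(spin.sizeHint(), spin.sizePolicy().horizontalPolicy())
```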
@@ -47,6 +47,9 @@
     <property name="insertPolicy">
      <enum>QComboBox::NoInsert</enum>
     </property>
+    <property name="sizeAdjustPolicy">
+     <enum>QComboBox::SizeAdjustPolicy::AdjustToMinimumContentsLengthWithIcon</enum>
+    </property>
    </widget>
   </item>
  </layout>
@@ -170,13 +170,42 @@ def favicon() -> Response:
 
 def _mime_for_path(path: str) -> str:
     "Mime type for provided path/filename."
-    if path.endswith(".css"):
-        # some users may have invalid mime type in the Windows registry
-        return "text/css"
-    elif path.endswith(".js") or path.endswith(".mjs"):
-        return "application/javascript"
+    _, ext = os.path.splitext(path)
+    ext = ext.lower()
+
+    # Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely
+    # break Anki's UI. So we hard-code the most common extensions.
+    mime_types = {
+        ".css": "text/css",
+        ".js": "application/javascript",
+        ".mjs": "application/javascript",
+        ".html": "text/html",
+        ".htm": "text/html",
+        ".svg": "image/svg+xml",
+        ".png": "image/png",
+        ".jpg": "image/jpeg",
+        ".jpeg": "image/jpeg",
+        ".gif": "image/gif",
+        ".webp": "image/webp",
+        ".ico": "image/x-icon",
+        ".json": "application/json",
+        ".woff": "font/woff",
+        ".woff2": "font/woff2",
+        ".ttf": "font/ttf",
+        ".otf": "font/otf",
+        ".mp3": "audio/mpeg",
+        ".mp4": "video/mp4",
+        ".webm": "video/webm",
+        ".ogg": "audio/ogg",
+        ".pdf": "application/pdf",
+        ".txt": "text/plain",
+    }
+
+    if mime := mime_types.get(ext):
+        return mime
     else:
-        # autodetect
+        # fallback to mimetypes, which may consult the registry
        mime, _encoding = mimetypes.guess_type(path)
        return mime or "application/octet-stream"
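The motivation, per the new comment: `mimetypes.guess_type()` consults the Windows registry, so one bad registry entry (say, `.js` remapped to `text/plain`) could break every served script or stylesheet. The rewrite answers from a hard-coded table first and only falls back to `mimetypes` for unlisted extensions. A standalone sketch of that lookup order (table truncated; the names are illustrative, not Anki's):

```
import mimetypes
import os

# Hard-coded answers for the extensions that matter most; the
# registry-backed mimetypes module is only a fallback.
KNOWN = {
    ".css": "text/css",
    ".js": "application/javascript",
    ".svg": "image/svg+xml",
}

def mime_for(path: str) -> str:
    ext = os.path.splitext(path)[1].lower()
    if mime := KNOWN.get(ext):
        return mime
    guessed, _encoding = mimetypes.guess_type(path)
    return guessed or "application/octet-stream"

assert mime_for("app.JS") == "application/javascript"  # extension lower-cased first
assert mime_for("file.nosuchext") == "application/octet-stream"
```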
@@ -601,7 +630,7 @@ def deck_options_ready() -> bytes:
 
 def save_custom_colours() -> bytes:
     colors = [
-        QColorDialog.customColor(i).name(QColor.NameFormat.HexArgb)
+        QColorDialog.customColor(i).name(QColor.NameFormat.HexRgb)
         for i in range(QColorDialog.customCount())
     ]
     aqt.mw.col.set_config("customColorPickerPalette", colors)
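`HexArgb` yields `#AARRGGBB` strings while `HexRgb` yields plain `#RRGGBB`; since this diff also exposes the stored palette to the web UI via the new `get_custom_colours` method, the plain form is presumably what the HTML colour inputs there expect. A small PyQt6 illustration of the two formats (requires PyQt6):

```
from PyQt6.QtGui import QColor

c = QColor(255, 0, 0, 128)  # half-transparent red
print(c.name(QColor.NameFormat.HexArgb))  # '#80ff0000', alpha channel first
print(c.name(QColor.NameFormat.HexRgb))   # '#ff0000', the form <input type="color"> accepts
```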
@@ -630,6 +659,7 @@ post_handler_list = [
 exposed_backend_list = [
     # CollectionService
     "latest_progress",
+    "get_custom_colours",
     # DeckService
     "get_deck_names",
     # I18nService
@@ -260,6 +260,7 @@ class Preferences(QDialog):
             self.update_login_status()
             self.confirm_sync_after_login()
 
+            self.update_network()
         sync_login(self.mw, on_success)
 
     def sync_logout(self) -> None:
@@ -209,11 +209,20 @@ def on_full_sync_timer(mw: aqt.main.AnkiQt, label: str) -> None:
         return
     sync_progress = progress.full_sync
 
+    # If we've reached total, show the "checking" label
     if sync_progress.transferred == sync_progress.total:
         label = tr.sync_checking()
 
+    total = sync_progress.total
+    transferred = sync_progress.transferred
+
+    # Scale both to kilobytes with floor division
+    max_for_bar = total // 1024
+    value_for_bar = transferred // 1024
+
     mw.progress.update(
-        value=sync_progress.transferred,
-        max=sync_progress.total,
+        value=value_for_bar,
+        max=max_for_bar,
         process=False,
         label=label,
     )
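A plausible reading of this change (the commit does not state it): Qt progress widgets take C `int` values, so raw byte counts overflow once a full sync exceeds 2^31-1 bytes (about 2 GB). Floor-dividing both value and max by the same factor keeps the displayed ratio identical while staying in range. A sketch:

```
INT_MAX = 2**31 - 1  # Qt progress values are C ints

def scale_for_bar(transferred: int, total: int) -> tuple[int, int]:
    # Dividing both sides by the same factor preserves value/max,
    # but keeps the numbers small enough for a 32-bit int.
    return transferred // 1024, total // 1024

total = 3 * 1024**3  # a 3 GB upload: the byte count exceeds INT_MAX
assert total > INT_MAX
value, maximum = scale_for_bar(total // 2, total)
assert maximum <= INT_MAX
assert abs(value / maximum - 0.5) < 0.001  # ratio unchanged
```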
@@ -94,8 +94,15 @@ class TTSPlayer:
 
         rank -= 1
 
-        # if no preferred voices match, we fall back on language
-        # with a rank of -100
+        # if no requested voices match, use a preferred fallback voice
+        # (for example, Apple Samantha) with rank of -50
+        for avail in avail_voices:
+            if avail.lang == tag.lang:
+                if avail.lang == "en_US" and avail.name.startswith("Apple_Samantha"):
+                    return TTSVoiceMatch(voice=avail, rank=-50)
+
+        # if no requested or preferred voices match, we fall back on
+        # the first available voice for the language, with a rank of -100
         for avail in avail_voices:
             if avail.lang == tag.lang:
                 return TTSVoiceMatch(voice=avail, rank=-100)
@@ -809,7 +809,7 @@ def ensureWidgetInScreenBoundaries(widget: QWidget) -> None:
     wsize = widget.size()
     cappedWidth = min(geom.width(), wsize.width())
     cappedHeight = min(geom.height(), wsize.height())
-    if cappedWidth > wsize.width() or cappedHeight > wsize.height():
+    if cappedWidth < wsize.width() or cappedHeight < wsize.height():
         widget.resize(QSize(cappedWidth, cappedHeight))
 
     # ensure widget is inside top left
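This one-character change is a genuine logic fix: the capped dimensions are computed with `min()`, so they can never exceed the widget's current size, which means the old `>` test never fired and oversized windows were never shrunk to fit the screen. The corrected `<` test fires exactly when the screen is smaller than the widget. A self-contained demonstration:

```
def capped(screen: tuple[int, int], widget: tuple[int, int]) -> tuple[int, int]:
    return min(screen[0], widget[0]), min(screen[1], widget[1])

screen, widget = (1280, 720), (1600, 900)  # widget larger than the screen
w, h = capped(screen, widget)

# min() can only yield values <= the widget's size, so the old
# "capped > current" branch was unreachable.
assert not (w > widget[0] or h > widget[1])
# The corrected comparison detects that a resize is needed.
assert w < widget[0] or h < widget[1]
```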
@@ -8,11 +8,13 @@ publish = false
 rust-version.workspace = true
 
 [dependencies]
+anki_i18n.workspace = true
 anki_io.workspace = true
 anki_process.workspace = true
 anyhow.workspace = true
 camino.workspace = true
 dirs.workspace = true
+locale_config.workspace = true
 serde_json.workspace = true
 
 [target.'cfg(all(unix, not(target_os = "macos")))'.dependencies]
@@ -7,4 +7,7 @@ fn main() {
             .manifest_required()
             .unwrap();
     }
+    println!("cargo:rerun-if-changed=../../out/buildhash");
+    let buildhash = std::fs::read_to_string("../../out/buildhash").unwrap_or_default();
+    println!("cargo:rustc-env=BUILDHASH={buildhash}");
 }
@@ -10,6 +10,7 @@ use std::process::Command;
 use std::time::SystemTime;
 use std::time::UNIX_EPOCH;
 
+use anki_i18n::I18n;
 use anki_io::copy_file;
 use anki_io::create_dir_all;
 use anki_io::modified_time;
@@ -31,6 +32,7 @@ use crate::platform::respawn_launcher;
 mod platform;
 
 struct State {
+    tr: I18n<anki_i18n::Launcher>,
     current_version: Option<String>,
     prerelease_marker: std::path::PathBuf,
     uv_install_root: std::path::PathBuf,
@@ -100,7 +102,14 @@ fn run() -> Result<()> {
 
     let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?;
 
+    let locale = locale_config::Locale::user_default().to_string();
+
     let mut state = State {
+        tr: I18n::new(&[if !locale.is_empty() {
+            locale
+        } else {
+            "en".to_owned()
+        }]),
         current_version: None,
         prerelease_marker: uv_install_root.join("prerelease"),
         uv_install_root: uv_install_root.clone(),
@@ -143,7 +152,9 @@
     let sync_time = file_timestamp_secs(&state.sync_complete_marker);
     state.pyproject_modified_by_user = pyproject_time > sync_time;
     let pyproject_has_changed = state.pyproject_modified_by_user;
-    if !launcher_requested && !pyproject_has_changed {
+    let different_launcher = diff_launcher_was_installed(&state)?;
+
+    if !launcher_requested && !pyproject_has_changed && !different_launcher {
         // If no launcher request and venv is already up to date, launch Anki normally
         let args: Vec<String> = std::env::args().skip(1).collect();
         let cmd = build_python_command(&state, &args)?;
@@ -160,10 +171,12 @@ fn run() -> Result<()> {
     }
 
     print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
-    println!("\x1B[1mAnki Launcher\x1B[0m\n");
+    println!("\x1B[1m{}\x1B[0m\n", state.tr.launcher_title());
 
     ensure_os_supported()?;
 
+    println!("{}\n", state.tr.launcher_press_enter_to_install());
+
     check_versions(&mut state);
 
     main_menu_loop(&state)?;
@@ -178,15 +191,18 @@
     }
 
     if cfg!(unix) && !cfg!(target_os = "macos") {
-        println!("\nPress enter to start Anki.");
+        println!("\n{}", state.tr.launcher_press_enter_to_start());
         let mut input = String::new();
         let _ = stdin().read_line(&mut input);
     } else {
         // on Windows/macOS, the user needs to close the terminal/console
         // currently, but ideas on how we can avoid this would be good!
         println!();
-        println!("Anki will start shortly.");
-        println!("\x1B[1mYou can close this window.\x1B[0m\n");
+        println!("{}", state.tr.launcher_anki_will_start_shortly());
+        println!(
+            "\x1B[1m{}\x1B[0m\n",
+            state.tr.launcher_you_can_close_this_window()
+        );
     }
 
     // respawn the launcher as a disconnected subprocess for normal startup
@@ -258,7 +274,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
     // Remove sync marker before attempting sync
     let _ = remove_file(&state.sync_complete_marker);
 
-    println!("Updating Anki...\n");
+    println!("{}\n", state.tr.launcher_updating_anki());
 
     let python_version_trimmed = if state.user_python_version_path.exists() {
         let python_version = read_file(&state.user_python_version_path)?;
@@ -311,7 +327,6 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
     }
 
     command
-        .env("UV_CACHE_DIR", &state.uv_cache_dir)
         .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
         .env(
             "UV_HTTP_TIMEOUT",
@@ -330,10 +345,6 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
         }
     }
 
-    if state.no_cache_marker.exists() {
-        command.env("UV_NO_CACHE", "1");
-    }
-
     match command.ensure_success() {
         Ok(_) => {
             // Sync succeeded
@@ -378,10 +389,10 @@ fn main_menu_loop(state: &State) -> Result<()> {
             // Toggle beta prerelease file
             if state.prerelease_marker.exists() {
                 let _ = remove_file(&state.prerelease_marker);
-                println!("Beta releases disabled.");
+                println!("{}", state.tr.launcher_beta_releases_disabled());
             } else {
                 write_file(&state.prerelease_marker, "")?;
-                println!("Beta releases enabled.");
+                println!("{}", state.tr.launcher_beta_releases_enabled());
             }
             println!();
             continue;
@@ -390,14 +401,14 @@
             // Toggle cache disable file
             if state.no_cache_marker.exists() {
                 let _ = remove_file(&state.no_cache_marker);
-                println!("Download caching enabled.");
+                println!("{}", state.tr.launcher_download_caching_enabled());
             } else {
                 write_file(&state.no_cache_marker, "")?;
                 // Delete the cache directory and everything in it
                 if state.uv_cache_dir.exists() {
                     let _ = anki_io::remove_dir_all(&state.uv_cache_dir);
                 }
-                println!("Download caching disabled and cache cleared.");
+                println!("{}", state.tr.launcher_download_caching_disabled());
             }
             println!();
             continue;
@@ -440,44 +451,62 @@ fn file_timestamp_secs(path: &std::path::Path) -> i64 {
 
 fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
     loop {
-        println!("1) Latest Anki (press Enter)");
-        println!("2) Choose a version");
+        println!("1) {}", state.tr.launcher_latest_anki());
+        println!("2) {}", state.tr.launcher_choose_a_version());
 
         if let Some(current_version) = &state.current_version {
             let normalized_current = normalize_version(current_version);
 
             if state.pyproject_modified_by_user {
-                println!("3) Sync project changes");
+                println!("3) {}", state.tr.launcher_sync_project_changes());
             } else {
-                println!("3) Keep existing version ({normalized_current})");
+                println!(
+                    "3) {}",
+                    state.tr.launcher_keep_existing_version(normalized_current)
+                );
             }
         }
 
         if let Some(prev_version) = &state.previous_version {
             if state.current_version.as_ref() != Some(prev_version) {
                 let normalized_prev = normalize_version(prev_version);
-                println!("4) Revert to previous version ({normalized_prev})");
+                println!(
+                    "4) {}",
+                    state.tr.launcher_revert_to_previous(normalized_prev)
+                );
             }
         }
         println!();
 
         let betas_enabled = state.prerelease_marker.exists();
         println!(
-            "5) Allow betas: {}",
-            if betas_enabled { "on" } else { "off" }
+            "5) {}",
+            state.tr.launcher_allow_betas(if betas_enabled {
+                state.tr.launcher_on()
+            } else {
+                state.tr.launcher_off()
+            })
        );
         let cache_enabled = !state.no_cache_marker.exists();
         println!(
-            "6) Cache downloads: {}",
-            if cache_enabled { "on" } else { "off" }
+            "6) {}",
+            state.tr.launcher_cache_downloads(if cache_enabled {
+                state.tr.launcher_on()
+            } else {
+                state.tr.launcher_off()
+            })
        );
         let mirror_enabled = is_mirror_enabled(state);
         println!(
-            "7) Download mirror: {}",
-            if mirror_enabled { "on" } else { "off" }
+            "7) {}",
+            state.tr.launcher_download_mirror(if mirror_enabled {
+                state.tr.launcher_on()
+            } else {
+                state.tr.launcher_off()
+            })
        );
         println!();
-        println!("8) Uninstall");
+        println!("8) {}", state.tr.launcher_uninstall());
         print!("> ");
         let _ = stdout().flush();
@@ -499,7 +528,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
                 if state.current_version.is_some() {
                     MainMenuChoice::KeepExisting
                 } else {
-                    println!("Invalid input. Please try again.\n");
+                    println!("{}\n", state.tr.launcher_invalid_input());
                     continue;
                 }
             }
@@ -511,7 +540,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
                     }
                 }
-                println!("Invalid input. Please try again.\n");
+                println!("{}\n", state.tr.launcher_invalid_input());
                 continue;
             }
             "5" => MainMenuChoice::ToggleBetas,
@@ -519,7 +548,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
             "7" => MainMenuChoice::DownloadMirror,
             "8" => MainMenuChoice::Uninstall,
             _ => {
-                println!("Invalid input. Please try again.");
+                println!("{}\n", state.tr.launcher_invalid_input());
                 continue;
             }
         });
@@ -534,9 +563,9 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
         .map(|v| v.as_str())
         .collect::<Vec<_>>()
         .join(", ");
-    println!("Latest releases: {releases_str}");
+    println!("{}", state.tr.launcher_latest_releases(releases_str));
 
-    println!("Enter the version you want to install:");
+    println!("{}", state.tr.launcher_enter_the_version_you_want());
     print!("> ");
     let _ = stdout().flush();
@@ -560,29 +589,38 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
             Ok(Some(version_kind))
         }
         (None, true) => {
-            println!("Versions before 2.1.50 can't be installed.");
+            println!("{}", state.tr.launcher_versions_before_cant_be_installed());
             Ok(None)
         }
         _ => {
-            println!("Invalid version.\n");
+            println!("{}\n", state.tr.launcher_invalid_version());
             Ok(None)
         }
     }
 }
 
 fn with_only_latest_patch(versions: &[String]) -> Vec<String> {
-    // Only show the latest patch release for a given (major, minor)
+    // Assumes versions are sorted in descending order (newest first)
+    // Only show the latest patch release for a given (major, minor),
+    // and exclude pre-releases if a newer major_minor exists
     let mut seen_major_minor = std::collections::HashSet::new();
     versions
         .iter()
         .filter(|v| {
-            let (major, minor, _, _) = parse_version_for_filtering(v);
+            let (major, minor, _, is_prerelease) = parse_version_for_filtering(v);
             if major == 2 {
                 return true;
             }
             let major_minor = (major, minor);
             if seen_major_minor.contains(&major_minor) {
                 false
+            } else if is_prerelease
+                && seen_major_minor
+                    .iter()
+                    .any(|&(seen_major, seen_minor)| (seen_major, seen_minor) > (major, minor))
+            {
+                // Exclude pre-release if a newer major_minor exists
+                false
             } else {
                 seen_major_minor.insert(major_minor);
                 true
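The same filtering expressed as a Python sketch: walk the versions newest-first, always keep legacy 2.x entries, keep only the first (latest) patch seen for each (major, minor), and drop a pre-release once any newer minor has already been kept. `parse` is a simplified stand-in for the Rust `parse_version_for_filtering`:

```
import re

def parse(v: str) -> tuple[int, int, bool]:
    # Simplified stand-in for parse_version_for_filtering:
    # "25.7b2" -> (25, 7, is_prerelease=True)
    m = re.match(r"(\d+)\.(\d+)", v)
    major, minor = int(m.group(1)), int(m.group(2))
    return major, minor, any(ch in v for ch in "ab") and major != 2

def with_only_latest_patch(versions: list[str]) -> list[str]:
    # Assumes input is sorted newest first, like the Rust code.
    seen: set[tuple[int, int]] = set()
    out: list[str] = []
    for v in versions:
        major, minor, is_pre = parse(v)
        if major == 2:
            out.append(v)  # legacy 2.x versions are always kept
            continue
        mm = (major, minor)
        if mm in seen:
            continue  # a newer patch of this minor was already kept
        if is_pre and any(s > mm for s in seen):
            continue  # stale pre-release: a newer minor already exists
        seen.add(mm)
        out.append(v)
    return out

print(with_only_latest_patch(["25.8b1", "25.7.2", "25.7.1", "25.6b2", "2.1.66"]))
# ['25.8b1', '25.7.2', '2.1.66']
```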
@@ -700,7 +738,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
     let output = match cmd.utf8_output() {
         Ok(output) => output,
         Err(e) => {
-            print!("Unable to check for Anki versions. Please check your internet connection.\n\n");
+            print!("{}\n\n", state.tr.launcher_unable_to_check_for_versions());
             return Err(e.into());
         }
     };
@@ -709,7 +747,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
 }
 
 fn get_releases(state: &State) -> Result<Releases> {
-    println!("Checking for updates...");
+    println!("{}", state.tr.launcher_checking_for_updates());
     let include_prereleases = state.prerelease_marker.exists();
     let all_versions = fetch_versions(state)?;
     let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
@@ -911,7 +949,7 @@ fn get_anki_addons21_path() -> Result<std::path::PathBuf> {
 }
 
 fn handle_uninstall(state: &State) -> Result<bool> {
-    println!("Uninstall Anki's program files? (y/n)");
+    println!("{}", state.tr.launcher_uninstall_confirm());
     print!("> ");
     let _ = stdout().flush();
@@ -920,7 +958,7 @@ fn handle_uninstall(state: &State) -> Result<bool> {
     let input = input.trim().to_lowercase();
 
     if input != "y" {
-        println!("Uninstall cancelled.");
+        println!("{}", state.tr.launcher_uninstall_cancelled());
         println!();
         return Ok(false);
     }
@@ -928,11 +966,11 @@ fn handle_uninstall(state: &State) -> Result<bool> {
     // Remove program files
     if state.uv_install_root.exists() {
         anki_io::remove_dir_all(&state.uv_install_root)?;
-        println!("Program files removed.");
+        println!("{}", state.tr.launcher_program_files_removed());
     }
 
     println!();
-    println!("Remove all profiles/cards? (y/n)");
+    println!("{}", state.tr.launcher_remove_all_profiles_confirm());
     print!("> ");
     let _ = stdout().flush();
@@ -942,7 +980,7 @@ fn handle_uninstall(state: &State) -> Result<bool> {
 
     if input == "y" && state.anki_base_folder.exists() {
         anki_io::remove_dir_all(&state.anki_base_folder)?;
-        println!("User data removed.");
+        println!("{}", state.tr.launcher_user_data_removed());
     }
 
     println!();
@@ -981,6 +1019,15 @@ fn uv_command(state: &State) -> Result<Command> {
             .env("UV_DEFAULT_INDEX", &pypi_mirror);
     }
 
+    if state.no_cache_marker.exists() {
+        command.env("UV_NO_CACHE", "1");
+    } else {
+        command.env("UV_CACHE_DIR", &state.uv_cache_dir);
+    }
+
+    // have uv use the system certstore instead of webpki-roots'
+    command.env("UV_NATIVE_TLS", "1");
+
     Ok(command)
 }
@@ -1036,9 +1083,9 @@ fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {
 
 fn show_mirror_submenu(state: &State) -> Result<()> {
     loop {
-        println!("Download mirror options:");
-        println!("1) No mirror");
-        println!("2) China");
+        println!("{}", state.tr.launcher_download_mirror_options());
+        println!("1) {}", state.tr.launcher_mirror_no_mirror());
+        println!("2) {}", state.tr.launcher_mirror_china());
         print!("> ");
         let _ = stdout().flush();
@@ -1052,14 +1099,14 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
                 if state.mirror_path.exists() {
                     let _ = remove_file(&state.mirror_path);
                 }
-                println!("Mirror disabled.");
+                println!("{}", state.tr.launcher_mirror_disabled());
                 break;
             }
             "2" => {
                 // Write China mirror URLs
                 let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
                 write_file(&state.mirror_path, china_mirrors)?;
-                println!("China mirror enabled.");
+                println!("{}", state.tr.launcher_mirror_china_enabled());
                 break;
             }
             "" => {
@@ -1067,7 +1114,7 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
                 break;
             }
             _ => {
-                println!("Invalid input. Please try again.");
+                println!("{}", state.tr.launcher_invalid_input());
                 continue;
             }
         }
@@ -1075,6 +1122,20 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
     Ok(())
 }
 
+fn diff_launcher_was_installed(state: &State) -> Result<bool> {
+    let launcher_version = option_env!("BUILDHASH").unwrap_or("dev").trim();
+    let launcher_version_path = state.uv_install_root.join("launcher-version");
+    if let Ok(content) = read_file(&launcher_version_path) {
+        if let Ok(version_str) = String::from_utf8(content) {
+            if version_str.trim() == launcher_version {
+                return Ok(false);
+            }
+        }
+    }
+    write_file(launcher_version_path, launcher_version)?;
+    Ok(true)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
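This helper pairs with the build.rs change above: `cargo:rustc-env=BUILDHASH=...` embeds the build hash at compile time, `option_env!` reads it back, and a `launcher-version` marker file in the install root records which launcher last synced the venv, forcing one re-sync after a launcher update. A Python sketch of the marker-file logic (paths and names illustrative):

```
import tempfile
from pathlib import Path

def launcher_changed(install_root: Path, current_hash: str) -> bool:
    """True (and the marker is updated) when the recorded hash differs
    from the running launcher's build hash."""
    marker = install_root / "launcher-version"
    try:
        if marker.read_text().strip() == current_hash.strip():
            return False  # same launcher as last run, nothing to do
    except OSError:
        pass  # first run: no marker yet
    marker.write_text(current_hash)
    return True

root = Path(tempfile.mkdtemp())
assert launcher_changed(root, "abc123") is True   # first run
assert launcher_changed(root, "abc123") is False  # unchanged
assert launcher_changed(root, "def456") is True   # launcher was updated
```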
@@ -134,5 +134,8 @@ pub fn ensure_os_supported() -> Result<()> {
     #[cfg(all(unix, not(target_os = "macos")))]
     unix::ensure_glibc_supported()?;
 
+    #[cfg(target_os = "windows")]
+    windows::ensure_windows_version_supported()?;
+
     Ok(())
 }
@@ -38,6 +38,26 @@ fn is_windows_10() -> bool {
     }
 }
 
+/// Ensures Windows 10 version 1809 or later
+pub fn ensure_windows_version_supported() -> Result<()> {
+    unsafe {
+        let mut info = OSVERSIONINFOW {
+            dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
+            ..Default::default()
+        };
+
+        if RtlGetVersion(&mut info).is_err() {
+            anyhow::bail!("Failed to get Windows version information");
+        }
+
+        if info.dwBuildNumber >= 17763 {
+            return Ok(());
+        }
+
+        anyhow::bail!("Windows 10 version 1809 or later is required.")
+    }
+}
+
 pub fn ensure_terminal_shown() -> Result<()> {
     unsafe {
         if !GetConsoleWindow().is_invalid() {
@@ -23,10 +23,10 @@ use write_strings::write_strings;
 
 fn main() -> Result<()> {
     // generate our own requirements
-    let map = get_ftl_data();
+    let mut map = get_ftl_data();
     check(&map);
-    let modules = get_modules(&map);
-    write_strings(&map, &modules);
+    let mut modules = get_modules(&map);
+    write_strings(&map, &modules, "strings.rs", "All");
 
     typescript::write_ts_interface(&modules)?;
     python::write_py_interface(&modules)?;
@@ -41,5 +41,12 @@ fn main() -> Result<()> {
             write_file_if_changed(path, meta_json)?;
         }
     }
+
+    // generate strings for the launcher
+    map.iter_mut()
+        .for_each(|(_, modules)| modules.retain(|module, _| module == "launcher"));
+    modules.retain(|module| module.name == "launcher");
+    write_strings(&map, &modules, "strings_launcher.rs", "Launcher");
+
     Ok(())
 }
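The FTL data here is a nested map of language to module to FTL text; after the normal `strings.rs` is written, both the data map and the module list are pruned to just the `launcher` module, and `write_strings` runs a second time to emit a much smaller `strings_launcher.rs` for the launcher binary. A Python sketch of the pruning step on that data shape (contents illustrative):

```
ftl_data = {
    "en": {"launcher": "launcher-title = Anki Launcher\n", "deck-config": "..."},
    "ja": {"launcher": "launcher-title = ...\n", "deck-config": "..."},
}

# Equivalent of: modules.retain(|module, _| module == "launcher")
# applied per language: keep only the launcher module everywhere.
for lang, modules in ftl_data.items():
    ftl_data[lang] = {name: text for name, text in modules.items() if name == "launcher"}

assert all(set(mods) == {"launcher"} for mods in ftl_data.values())
```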
@@ -1,8 +1,15 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-// Include auto-generated content
-
 #![allow(clippy::all)]
 
+#[derive(Clone)]
+pub struct All;
+
+// Include auto-generated content
 include!(concat!(env!("OUT_DIR"), "/strings.rs"));
+
+impl Translations for All {
+    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS;
+    const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE;
+}
rslib/i18n/src/generated_launcher.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+#![allow(clippy::all)]
+
+#[derive(Clone)]
+pub struct Launcher;
+
+// Include auto-generated content
+include!(concat!(env!("OUT_DIR"), "/strings_launcher.rs"));
+
+impl Translations for Launcher {
+    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS;
+    const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE;
+}
@@ -2,8 +2,10 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 mod generated;
+mod generated_launcher;
 
 use std::borrow::Cow;
+use std::marker::PhantomData;
 use std::sync::Arc;
 use std::sync::Mutex;
@@ -12,8 +14,6 @@ use fluent::FluentArgs;
 use fluent::FluentResource;
 use fluent::FluentValue;
 use fluent_bundle::bundle::FluentBundle as FluentBundleOrig;
-use generated::KEYS_BY_MODULE;
-use generated::STRINGS;
 use num_format::Locale;
 use serde::Serialize;
 use unic_langid::LanguageIdentifier;
@@ -22,6 +22,9 @@ type FluentBundle<T> = FluentBundleOrig<T, intl_memoizer::concurrent::IntlLangMe
 
 pub use fluent::fluent_args as tr_args;
 
+pub use crate::generated::All;
+pub use crate::generated_launcher::Launcher;
+
 pub trait Number: Into<FluentNumber> {
     fn round(self) -> Self;
 }
@@ -187,20 +190,67 @@ fn get_bundle_with_extra(
     get_bundle(text, extra_text, &locales)
 }
 
+pub trait Translations {
+    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>>;
+    const KEYS_BY_MODULE: &[&[&str]];
+}
+
 #[derive(Clone)]
-pub struct I18n {
+pub struct I18n<P: Translations = All> {
     inner: Arc<Mutex<I18nInner>>,
+    _translations_type: std::marker::PhantomData<P>,
 }
 
-fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
-    KEYS_BY_MODULE
-        .get(module_idx)
-        .and_then(|translations| translations.get(translation_idx))
-        .cloned()
-        .unwrap_or("invalid-module-or-translation-index")
-}
+impl<P: Translations> I18n<P> {
+    fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
+        P::KEYS_BY_MODULE
+            .get(module_idx)
+            .and_then(|translations| translations.get(translation_idx))
+            .cloned()
+            .unwrap_or("invalid-module-or-translation-index")
+    }
+
+    fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
+        langs
+            .iter()
+            .cloned()
+            .map(|lang| {
+                let mut buf = String::new();
+                let lang_name = remapped_lang_name(&lang);
+                if let Some(strings) = P::STRINGS.get(lang_name) {
+                    if desired_modules.is_empty() {
+                        // empty list, provide all modules
+                        for value in strings.values() {
+                            buf.push_str(value)
+                        }
+                    } else {
+                        for module_name in desired_modules {
+                            if let Some(text) = strings.get(module_name.as_str()) {
+                                buf.push_str(text);
+                            }
+                        }
+                    }
+                }
+                buf
+            })
+            .collect()
+    }
+
+    /// This temporarily behaves like the older code; in the future we could
+    /// either access each &str separately, or load them on demand.
+    fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
+        let lang = remapped_lang_name(lang);
+        if let Some(module) = P::STRINGS.get(lang) {
+            let mut text = String::new();
+            for module_text in module.values() {
+                text.push_str(module_text)
+            }
+            Some(text)
+        } else {
+            None
+        }
+    }
 
-impl I18n {
     pub fn template_only() -> Self {
         Self::new::<&str>(&[])
     }
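[Reviewer sketch — not part of the diff. The default type parameter keeps
existing `I18n` call sites compiling unchanged, while the launcher can name
the smaller string table explicitly; the `new(&[...])` constructor is the one
exercised by the tests further down.]

    let full: I18n = I18n::new(&["en"]); // resolves to I18n<All> via the default
    let launcher: I18n<Launcher> = I18n::new(&["en"]); // launcher-only strings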
@@ -225,7 +275,7 @@ impl I18n {
         let mut output_langs = vec![];
         for lang in input_langs {
             // if the language is bundled in the binary
-            if let Some(text) = ftl_localized_text(&lang).or_else(|| {
+            if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| {
                 // when testing, allow missing translations
                 if cfg!(test) {
                     Some(String::new())

@@ -244,7 +294,7 @@ impl I18n {
 
         // add English templates
         let template_lang = "en-US".parse().unwrap();
-        let template_text = ftl_localized_text(&template_lang).unwrap();
+        let template_text = Self::ftl_localized_text(&template_lang).unwrap();
         let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
         bundles.push(template_bundle);
         output_langs.push(template_lang);

@@ -261,6 +311,7 @@ impl I18n {
                 bundles,
                 langs: output_langs,
             })),
+            _translations_type: PhantomData,
         }
     }

@@ -270,7 +321,7 @@ impl I18n {
         message_index: usize,
         args: FluentArgs,
     ) -> String {
-        let key = get_key(module_index, message_index);
+        let key = Self::get_key(module_index, message_index);
         self.translate(key, Some(args)).into()
     }

@@ -305,7 +356,7 @@ impl I18n {
     /// implementation.
     pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
         let inner = self.inner.lock().unwrap();
-        let resources = get_modules(&inner.langs, desired_modules);
+        let resources = Self::get_modules(&inner.langs, desired_modules);
         ResourcesForJavascript {
             langs: inner.langs.iter().map(ToString::to_string).collect(),
             resources,

@@ -313,47 +364,6 @@ impl I18n {
         }
     }
 }
 
-fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
-    langs
-        .iter()
-        .cloned()
-        .map(|lang| {
-            let mut buf = String::new();
-            let lang_name = remapped_lang_name(&lang);
-            if let Some(strings) = STRINGS.get(lang_name) {
-                if desired_modules.is_empty() {
-                    // empty list, provide all modules
-                    for value in strings.values() {
-                        buf.push_str(value)
-                    }
-                } else {
-                    for module_name in desired_modules {
-                        if let Some(text) = strings.get(module_name.as_str()) {
-                            buf.push_str(text);
-                        }
-                    }
-                }
-            }
-            buf
-        })
-        .collect()
-}
-
-/// This temporarily behaves like the older code; in the future we could either
-/// access each &str separately, or load them on demand.
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
-    let lang = remapped_lang_name(lang);
-    if let Some(module) = STRINGS.get(lang) {
-        let mut text = String::new();
-        for module_text in module.values() {
-            text.push_str(module_text)
-        }
-        Some(text)
-    } else {
-        None
-    }
-}
-
 struct I18nInner {
     // bundles in preferred language order, with template English as the
     // last element

@@ -490,7 +500,7 @@ mod test {
     #[test]
     fn i18n() {
         // English template
-        let tr = I18n::new(&["zz"]);
+        let tr = I18n::<All>::new(&["zz"]);
         assert_eq!(tr.translate("valid-key", None), "a valid key");
         assert_eq!(tr.translate("invalid-key", None), "invalid-key");

@@ -513,7 +523,7 @@ mod test {
         );
 
         // Another language
-        let tr = I18n::new(&["ja_JP"]);
+        let tr = I18n::<All>::new(&["ja_JP"]);
         assert_eq!(tr.translate("valid-key", None), "キー");
         assert_eq!(tr.translate("only-in-english", None), "not translated");
         assert_eq!(tr.translate("invalid-key", None), "invalid-key");

@@ -524,7 +534,7 @@ mod test {
         );
 
         // Decimal separator
-        let tr = I18n::new(&["pl-PL"]);
+        let tr = I18n::<All>::new(&["pl-PL"]);
         // Polish will use a comma if the string is translated
         assert_eq!(
             tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])),
@@ -15,7 +15,7 @@ use crate::extract::VariableKind;
 use crate::gather::TranslationsByFile;
 use crate::gather::TranslationsByLang;
 
-pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
+pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) {
     let mut buf = String::new();
 
     // lang->module map

@@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
     // ordered list of translations by module
     write_translation_key_index(modules, &mut buf);
     // methods to generate messages
-    write_methods(modules, &mut buf);
+    write_methods(modules, &mut buf, tag);
 
     let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
-    let path = dir.join("strings.rs");
+    let path = dir.join(out_fn);
     fs::write(path, buf).unwrap();
 }
 
-fn write_methods(modules: &[Module], buf: &mut String) {
+fn write_methods(modules: &[Module], buf: &mut String, tag: &str) {
     buf.push_str(
         r#"
-use crate::{I18n,Number};
+#[allow(unused_imports)]
+use crate::{I18n,Number,Translations};
+#[allow(unused_imports)]
 use fluent::{FluentValue, FluentArgs};
 use std::borrow::Cow;
 
-impl I18n {
 "#,
     );
+    writeln!(buf, "impl I18n<{tag}> {{").unwrap();
     for module in modules {
         for translation in &module.translations {
             let func = translation.key.to_snake_case();

@@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
 
     writeln!(
         buf,
-        "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
+        "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [",
         count = modules.len(),
     )
     .unwrap();

@@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
 fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
     buf.push_str(
         "
-pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
+pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
         ",
     );
@@ -22,7 +22,7 @@ pub(crate) fn write_python_interface(services: &[BackendService]) -> Result<()>
     write_header(&mut out)?;
 
     for service in services {
-        if service.name == "BackendAnkidroidService" {
+        if ["BackendAnkidroidService", "BackendFrontendService"].contains(&service.name.as_str()) {
             continue;
         }
         for method in service.all_methods() {
@@ -10,6 +10,7 @@ use std::sync::LazyLock;
 use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
 use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
 use htmlescape::encode_attribute;
+use itertools::Itertools;
 use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::bytes::complete::take_while;

@@ -26,7 +27,7 @@ use crate::template::RenderContext;
 use crate::text::strip_html_preserving_entities;
 
 static CLOZE: LazyLock<Regex> =
-    LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
+    LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap());
 
 static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
     Regex::new(

@@ -48,7 +49,7 @@ mod mathjax_caps {
 #[derive(Debug)]
 enum Token<'a> {
     // The parameter is the cloze number as is appears in the field content.
-    OpenCloze(u16),
+    OpenCloze(Vec<u16>),
     Text(&'a str),
     CloseCloze,
 }
@@ -58,21 +59,24 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
     fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
         // opening brackets and 'c'
         let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
-        // following number
-        let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?;
-        let digits: u16 = match digits.parse() {
-            Ok(digits) => digits,
-            Err(_) => {
-                // not a valid number; fail to recognize
-                return Err(nom::Err::Error(nom::error::make_error(
-                    text,
-                    nom::error::ErrorKind::Digit,
-                )));
-            }
-        };
+        // following comma-seperated numbers
+        let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?;
+        let ordinals: Vec<u16> = ordinals
+            .split(',')
+            .filter_map(|s| s.parse().ok())
+            .collect::<HashSet<_>>() // deduplicate
+            .into_iter()
+            .sorted() // set conversion can de-order
+            .collect();
+        if ordinals.is_empty() {
+            return Err(nom::Err::Error(nom::error::make_error(
+                text,
+                nom::error::ErrorKind::Digit,
+            )));
+        }
         // ::
         let (text, _colons) = tag("::")(text)?;
-        Ok((text, Token::OpenCloze(digits)))
+        Ok((text, Token::OpenCloze(ordinals)))
     }
 
     fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
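[Reviewer sketch — not part of the diff. The tokenizer above now accepts
comma-separated ordinal lists such as {{c1,3::...}}; a standalone model of the
ordinal handling, using the same HashSet dedup plus itertools sort as the hunk:]

    use std::collections::HashSet;
    use itertools::Itertools;

    fn parse_ordinals(s: &str) -> Vec<u16> {
        s.split(',')
            .filter_map(|p| p.parse().ok()) // "" and junk are dropped: "1,,3" -> [1, 3]
            .collect::<HashSet<u16>>() // "1,1,2" deduplicates to {1, 2}
            .into_iter()
            .sorted() // set iteration order is arbitrary
            .collect()
    }

    fn main() {
        assert_eq!(parse_ordinals("3,1,1"), vec![1, 3]);
    }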
@@ -121,11 +125,20 @@ enum TextOrCloze<'a> {
 #[derive(Debug)]
 struct ExtractedCloze<'a> {
     // `ordinal` is the cloze number as is appears in the field content.
-    ordinal: u16,
+    ordinals: Vec<u16>,
     nodes: Vec<TextOrCloze<'a>>,
     hint: Option<&'a str>,
 }
 
+/// Generate a string representation of the ordinals for HTML
+fn ordinals_str(ordinals: &[u16]) -> String {
+    ordinals
+        .iter()
+        .map(|o| o.to_string())
+        .collect::<Vec<_>>()
+        .join(",")
+}
+
 impl ExtractedCloze<'_> {
     /// Return the cloze's hint, or "..." if none was provided.
     fn hint(&self) -> &str {

@@ -151,6 +164,11 @@ impl ExtractedCloze<'_> {
         buf.into()
     }
 
+    /// Checks if this cloze is active for a given ordinal
+    fn contains_ordinal(&self, ordinal: u16) -> bool {
+        self.ordinals.contains(&ordinal)
+    }
+
     /// If cloze starts with image-occlusion:, return the text following that.
     fn image_occlusion(&self) -> Option<&str> {
         let TextOrCloze::Text(text) = self.nodes.first()? else {

@@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec<TextOrCloze<'_>> {
     let mut output = vec![];
     for token in tokenize(text) {
         match token {
-            Token::OpenCloze(ordinal) => {
+            Token::OpenCloze(ordinals) => {
                 if open_clozes.len() < 10 {
                     open_clozes.push(ExtractedCloze {
-                        ordinal,
+                        ordinals,
                         nodes: Vec::with_capacity(1), // common case
                         hint: None,
                     })

@@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes(
     output: &mut Vec<String>,
 ) {
     if let TextOrCloze::Cloze(cloze) = node {
-        if cloze.ordinal == cloze_ord {
+        if cloze.contains_ordinal(cloze_ord) {
             if question {
                 output.push(cloze.hint().into())
             } else {

@@ -234,14 +252,15 @@ fn reveal_cloze(
     active_cloze_found_in_text: &mut bool,
     buf: &mut String,
 ) {
-    let active = cloze.ordinal == cloze_ord;
+    let active = cloze.contains_ordinal(cloze_ord);
     *active_cloze_found_in_text |= active;
 
     if let Some(image_occlusion_text) = cloze.image_occlusion() {
         buf.push_str(&render_image_occlusion(
             image_occlusion_text,
             question,
             active,
-            cloze.ordinal,
+            &cloze.ordinals,
         ));
         return;
     }

@@ -265,7 +284,7 @@ fn reveal_cloze(
                 buf,
                 r#"<span class="cloze" data-cloze="{}" data-ordinal="{}">[{}]</span>"#,
                 encode_attribute(&content_buf),
-                cloze.ordinal,
+                ordinals_str(&cloze.ordinals),
                 cloze.hint()
             )
             .unwrap();

@@ -274,7 +293,7 @@ fn reveal_cloze(
             write!(
                 buf,
                 r#"<span class="cloze" data-ordinal="{}">"#,
-                cloze.ordinal
+                ordinals_str(&cloze.ordinals)
             )
             .unwrap();
             for node in &cloze.nodes {

@@ -292,7 +311,7 @@ fn reveal_cloze(
         write!(
             buf,
             r#"<span class="cloze-inactive" data-ordinal="{}">"#,
-            cloze.ordinal
+            ordinals_str(&cloze.ordinals)
         )
         .unwrap();
         for node in &cloze.nodes {
@@ -308,23 +327,28 @@ fn reveal_cloze(
         }
     }
 }
 
-fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String {
-    if (question_side && active) || ordinal == 0 {
+fn render_image_occlusion(
+    text: &str,
+    question_side: bool,
+    active: bool,
+    ordinals: &[u16],
+) -> String {
+    if (question_side && active) || ordinals.contains(&0) {
         format!(
             r#"<div class="cloze" data-ordinal="{}" {}></div>"#,
-            ordinal,
+            ordinals_str(ordinals),
             &get_image_cloze_data(text)
         )
     } else if !active {
         format!(
             r#"<div class="cloze-inactive" data-ordinal="{}" {}></div>"#,
-            ordinal,
+            ordinals_str(ordinals),
             &get_image_cloze_data(text)
         )
     } else if !question_side && active {
         format!(
             r#"<div class="cloze-highlight" data-ordinal="{}" {}></div>"#,
-            ordinal,
+            ordinals_str(ordinals),
             &get_image_cloze_data(text)
        )
    } else {

@@ -338,7 +362,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
         if let TextOrCloze::Cloze(cloze) = node {
             if cloze.image_occlusion().is_some() {
                 if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) {
-                    occlusions.entry(cloze.ordinal).or_default().push(shape);
+                    // Associate this occlusion with all ordinals in this cloze
+                    for &ordinal in &cloze.ordinals {
+                        occlusions.entry(ordinal).or_default().push(shape.clone());
+                    }
                 }
             }
         }

@@ -420,7 +447,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String {
 pub(crate) fn contains_cloze(text: &str) -> bool {
     parse_text_with_clozes(text)
         .iter()
-        .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0))
+        .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0)))
 }
 
 /// Returns the set of cloze number as they appear in the fields's content.

@@ -433,10 +460,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet<u16> {
 fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet<u16>) {
     for node in nodes {
         if let TextOrCloze::Cloze(cloze) = node {
-            if cloze.ordinal != 0 {
-                set.insert(cloze.ordinal);
-                add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
+            for &ordinal in &cloze.ordinals {
+                if ordinal != 0 {
+                    set.insert(ordinal);
+                }
             }
+            add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
         }
     }
 }
@@ -654,4 +683,160 @@ mod test {
             )
         );
     }
+
+    #[test]
+    fn multi_card_card_generation() {
+        let text = "{{c1,2,3::multi}}";
+        assert_eq!(
+            cloze_number_in_fields(vec![text]),
+            vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
+        );
+    }
+
+    #[test]
+    fn multi_card_cloze_basic() {
+        let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}";
+
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+            "[...] word and [...] vs second"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+            "[...] word and first vs [...]"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+            "shared word and first vs second"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+            "shared word and first vs second"
+        );
+        assert_eq!(
+            cloze_numbers_in_string(text),
+            vec![1, 2].into_iter().collect::<HashSet<u16>>()
+        );
+    }
+
+    #[test]
+    fn multi_card_cloze_html_attributes() {
+        let text = "{{c1,2,3::multi}}";
+
+        let card1_html = reveal_cloze_text(text, 1, true);
+        assert!(card1_html.contains(r#"data-ordinal="1,2,3""#));
+
+        let card2_html = reveal_cloze_text(text, 2, true);
+        assert!(card2_html.contains(r#"data-ordinal="1,2,3""#));
+
+        let card3_html = reveal_cloze_text(text, 3, true);
+        assert!(card3_html.contains(r#"data-ordinal="1,2,3""#));
+    }
+
+    #[test]
+    fn multi_card_cloze_with_hints() {
+        let text = "{{c1,2::answer::hint}}";
+
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+            "[hint]"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+            "[hint]"
+        );
+
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 1, false)).as_ref(),
+            "answer"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 2, false)).as_ref(),
+            "answer"
+        );
+    }
+
+    #[test]
+    fn multi_card_cloze_edge_cases() {
+        assert_eq!(
+            cloze_numbers_in_string("{{c1,1,2::test}}"),
+            vec![1, 2].into_iter().collect::<HashSet<u16>>()
+        );
+
+        assert_eq!(
+            cloze_numbers_in_string("{{c0,1,2::test}}"),
+            vec![1, 2].into_iter().collect::<HashSet<u16>>()
+        );
+
+        assert_eq!(
+            cloze_numbers_in_string("{{c1,,3::test}}"),
+            vec![1, 3].into_iter().collect::<HashSet<u16>>()
+        );
+    }
+
+    #[test]
+    fn multi_card_cloze_only_filter() {
+        let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}";
+
+        assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ...");
+        assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ...");
+        assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first");
+        assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second");
+    }
+
+    #[test]
+    fn multi_card_nested_cloze() {
+        let text = "{{c1,2::outer {{c3::inner}}}}";
+
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+            "[...]"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 2, true)).as_ref(),
+            "[...]"
+        );
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 3, true)).as_ref(),
+            "outer [...]"
+        );
+        assert_eq!(
+            cloze_numbers_in_string(text),
+            vec![1, 2, 3].into_iter().collect::<HashSet<u16>>()
+        );
+    }
+
+    #[test]
+    fn nested_parent_child_card_same_cloze() {
+        let text = "{{c1::outer {{c1::inner}}}}";
+
+        assert_eq!(
+            strip_html(&reveal_cloze_text(text, 1, true)).as_ref(),
+            "[...]"
+        );
+        assert_eq!(
+            cloze_numbers_in_string(text),
+            vec![1].into_iter().collect::<HashSet<u16>>()
+        );
+    }
+
+    #[test]
+    fn multi_card_image_occlusion() {
+        let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}";
+
+        let occlusions = parse_image_occlusions(text);
+        assert_eq!(occlusions.len(), 2);
+        assert!(occlusions.iter().any(|o| o.ordinal == 1));
+        assert!(occlusions.iter().any(|o| o.ordinal == 2));
+
+        let card1_html = reveal_cloze_text(text, 1, true);
+        assert!(card1_html.contains(r#"data-ordinal="1,2""#));
+
+        let card2_html = reveal_cloze_text(text, 2, true);
+        assert!(card2_html.contains(r#"data-ordinal="1,2""#));
+    }
 }
@@ -1,8 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use anki_proto::collection::GetCustomColoursResponse;
 use anki_proto::generic;
 
 use crate::collection::Collection;
+use crate::config::ConfigKey;
 use crate::error;
 use crate::prelude::BoolKey;
 use crate::prelude::Op;

@@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection {
         })
         .map(Into::into)
     }
+
+    fn get_custom_colours(
+        &mut self,
+    ) -> error::Result<anki_proto::collection::GetCustomColoursResponse> {
+        let colours = self
+            .get_config_optional(ConfigKey::CustomColorPickerPalette)
+            .unwrap_or_default();
+        Ok(GetCustomColoursResponse { colours })
+    }
 }
@@ -71,6 +71,7 @@ pub(crate) enum ConfigKey {
     NextNewCardPosition,
     #[strum(to_string = "schedVer")]
     SchedulerVersion,
+    CustomColorPickerPalette,
 }
 
 #[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)]
@@ -17,6 +17,7 @@ use crate::import_export::package::media::SafeMediaEntry;
 use crate::import_export::ImportProgress;
 use crate::media::files::add_hash_suffix_to_file_stem;
 use crate::media::files::sha1_of_reader;
+use crate::media::Checksums;
 use crate::prelude::*;
 use crate::progress::ThrottlingProgressHandler;

@@ -75,7 +76,7 @@ impl Context<'_> {
 fn prepare_media(
     media_entries: Vec<SafeMediaEntry>,
     archive: &mut ZipArchive<File>,
-    existing_sha1s: &HashMap<String, Sha1Hash>,
+    existing_sha1s: &Checksums,
     progress: &mut ThrottlingProgressHandler<ImportProgress>,
 ) -> Result<MediaUseMap> {
     let mut media_map = MediaUseMap::default();
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use std::collections::HashSet;
 use std::io::BufRead;
 use std::io::BufReader;
 use std::io::Read;

@@ -106,6 +107,8 @@ struct ColumnContext {
     notetype_column: Option<usize>,
     /// Source column indices for the fields of a notetype
     field_source_columns: FieldSourceColumns,
+    /// Metadata column indices (1-based)
+    meta_columns: HashSet<usize>,
     /// How fields are converted to strings. Used for escaping HTML if
     /// appropriate.
     stringify: fn(&str) -> String,

@@ -119,6 +122,7 @@ impl ColumnContext {
             deck_column: metadata.deck()?.column(),
             notetype_column: metadata.notetype()?.column(),
             field_source_columns: metadata.field_source_columns()?,
+            meta_columns: metadata.meta_columns(),
             stringify: stringify_fn(metadata.is_html),
         })
     }

@@ -166,11 +170,19 @@ impl ColumnContext {
     }
 
     fn gather_note_fields(&self, record: &csv::StringRecord) -> Vec<Option<String>> {
-        let stringify = self.stringify;
-        self.field_source_columns
-            .iter()
-            .map(|opt| opt.and_then(|idx| record.get(idx - 1)).map(stringify))
-            .collect()
+        let op = |i| record.get(i - 1).map(self.stringify);
+        if !self.field_source_columns.is_empty() {
+            self.field_source_columns
+                .iter()
+                .map(|opt| opt.and_then(op))
+                .collect()
+        } else {
+            // notetype column provided, assume all non-metadata columns are notetype fields
+            (1..=record.len())
+                .filter(|i| !self.meta_columns.contains(i))
+                .map(op)
+                .collect()
+        }
     }
 }
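[Reviewer sketch — not part of the diff. A standalone model of the new
fallback path, with invented data: when a notetype column is in use,
field_source_columns is empty and every non-metadata column becomes a field,
in record order.]

    use std::collections::HashSet;

    fn fallback_fields(record: &[&str], meta_columns: &HashSet<usize>) -> Vec<Option<String>> {
        (1..=record.len())
            .filter(|i| !meta_columns.contains(i))
            .map(|i| record.get(i - 1).map(|s| s.to_string()))
            .collect()
    }

    fn main() {
        let meta: HashSet<usize> = [1].into_iter().collect(); // e.g. the notetype column
        let row = ["Basic", "front text", "back text"];
        assert_eq!(
            fallback_fields(&row, &meta),
            vec![Some("front text".into()), Some("back text".into())]
        );
    }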
@@ -291,11 +291,8 @@ impl CsvMetadataHelpers for CsvMetadata {
                 .map(|&i| (i > 0).then_some(i as usize))
                 .collect(),
             CsvNotetype::NotetypeColumn(_) => {
-                let meta_columns = self.meta_columns();
-                (1..self.column_labels.len() + 1)
-                    .filter(|idx| !meta_columns.contains(idx))
-                    .map(Some)
-                    .collect()
+                // each row's notetype could have varying number of fields
+                vec![]
             }
         })
     }
@@ -173,7 +173,9 @@ pub fn add_data_to_folder_uniquely<'a, P>(
 where
     P: AsRef<Path>,
 {
-    let normalized_name = normalize_filename(desired_name);
+    // force lowercase to account for case-insensitive filesystems
+    // but not within normalize_filename, for existing media refs
+    let normalized_name: Cow<_> = normalize_filename(desired_name).to_lowercase().into();
 
     let mut target_path = folder.as_ref().join(normalized_name.as_ref());
 

@@ -496,8 +498,14 @@ mod test {
             "test.mp3"
         );
 
-        // different contents
+        // different contents, filenames differ only by case
         let h2 = sha1_of_data(b"hello1");
+        assert_eq!(
+            add_data_to_folder_uniquely(dpath, "Test.mp3", b"hello1", h2).unwrap(),
+            "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3"
+        );
+
+        // same contents, filenames differ only by case
         assert_eq!(
             add_data_to_folder_uniquely(dpath, "test.mp3", b"hello1", h2).unwrap(),
             "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3"
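[Reviewer sketch — not part of the diff. Why the lowercase step matters: on a
case-insensitive filesystem "Test.mp3" and "test.mp3" are the same file, so
the incoming name is folded before the uniqueness check; colliding names with
different content then get the hash-suffixed form, as the test above shows.]

    fn main() {
        assert_eq!("Test.mp3".to_lowercase(), "test.mp3");
    }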
@@ -6,7 +6,6 @@ pub mod files;
 mod service;
 
 use std::borrow::Cow;
-use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;

@@ -22,6 +21,7 @@ use crate::progress::ThrottlingProgressHandler;
 use crate::sync::http_client::HttpSyncClient;
 use crate::sync::login::SyncAuth;
 use crate::sync::media::database::client::changetracker::ChangeTracker;
+pub use crate::sync::media::database::client::Checksums;
 use crate::sync::media::database::client::MediaDatabase;
 use crate::sync::media::database::client::MediaEntry;
 use crate::sync::media::progress::MediaSyncProgress;

@@ -157,7 +157,7 @@ impl MediaManager {
     pub fn all_checksums_after_checking(
         &self,
         progress: impl FnMut(usize) -> bool,
-    ) -> Result<HashMap<String, Sha1Hash>> {
+    ) -> Result<Checksums> {
         ChangeTracker::new(&self.media_folder, progress).register_changes(&self.db)?;
         self.db.all_registered_checksums()
     }

@@ -176,7 +176,7 @@ impl MediaManager {
 
     /// All checksums without registering changes first.
     #[cfg(test)]
-    pub(crate) fn all_checksums_as_is(&self) -> HashMap<String, [u8; 20]> {
+    pub(crate) fn all_checksums_as_is(&self) -> Checksums {
         self.db.all_registered_checksums().unwrap()
     }
 }
@@ -122,7 +122,7 @@ pub(crate) fn basic(tr: &I18n) -> Notetype {
 
 pub(crate) fn basic_typing(tr: &I18n) -> Notetype {
     let mut nt = basic(tr);
-    nt.config.original_stock_kind = StockKind::BasicTyping as i32;
+    nt.config.original_stock_kind = OriginalStockKind::BasicTyping as i32;
     nt.name = tr.notetypes_basic_type_answer_name().into();
     let front = tr.notetypes_front_field();
     let back = tr.notetypes_back_field();

@@ -138,7 +138,7 @@ pub(crate) fn basic_typing(tr: &I18n) -> Notetype {
 
 pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype {
     let mut nt = basic(tr);
-    nt.config.original_stock_kind = StockKind::BasicAndReversed as i32;
+    nt.config.original_stock_kind = OriginalStockKind::BasicAndReversed as i32;
     nt.name = tr.notetypes_basic_reversed_name().into();
     let front = tr.notetypes_front_field();
     let back = tr.notetypes_back_field();

@@ -156,7 +156,7 @@ pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype {
 
 pub(crate) fn basic_optional_reverse(tr: &I18n) -> Notetype {
     let mut nt = basic_forward_reverse(tr);
-    nt.config.original_stock_kind = StockKind::BasicOptionalReversed as i32;
+    nt.config.original_stock_kind = OriginalStockKind::BasicOptionalReversed as i32;
     nt.name = tr.notetypes_basic_optional_reversed_name().into();
     let addrev = tr.notetypes_add_reverse_field();
     nt.add_field(addrev.as_ref());
@@ -85,6 +85,15 @@ impl RevlogEntry {
             .unwrap()
     }
 
+    pub(crate) fn last_interval_secs(&self) -> u32 {
+        u32::try_from(if self.last_interval > 0 {
+            self.last_interval.saturating_mul(86_400)
+        } else {
+            self.last_interval.saturating_mul(-1)
+        })
+        .unwrap()
+    }
+
     /// Returns true if this entry represents a reset operation.
     /// These entries are created when a card is reset using
     /// [`Collection::reschedule_cards_as_new`].
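[Reviewer note — the revlog stores last_interval as days when positive and as
negative seconds otherwise, so the helper above normalises both encodings to
seconds. A worked check of the two branches:]

    fn main() {
        assert_eq!(3i32.saturating_mul(86_400), 259_200); // 3 days -> seconds
        assert_eq!((-600i32).saturating_mul(-1), 600); // -600 -> 600 seconds
    }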
@@ -443,9 +443,20 @@ impl Collection {
             .storage
             .get_deck(card.deck_id)?
             .or_not_found(card.deck_id)?;
-        let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
-        let desired_retention = deck.effective_desired_retention(&config);
+        let home_deck = if card.original_deck_id.0 == 0 {
+            &deck
+        } else {
+            &self
+                .storage
+                .get_deck(card.original_deck_id)?
+                .or_not_found(card.original_deck_id)?
+        };
+        let config = self
+            .storage
+            .get_deck_config(home_deck.config_id().or_invalid("home deck is filtered")?)?
+            .unwrap_or_default();
+
+        let desired_retention = home_deck.effective_desired_retention(&config);
         let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
         let fsrs_next_states = if fsrs_enabled {
             let params = config.fsrs_params();
@@ -13,13 +13,7 @@ impl From<FSRSError> for AnkiError {
             FSRSError::OptimalNotFound => AnkiError::FsrsUnableToDetermineDesiredRetention,
             FSRSError::Interrupted => AnkiError::Interrupted,
             FSRSError::InvalidParameters => AnkiError::FsrsParamsInvalid,
-            FSRSError::InvalidInput => AnkiError::InvalidInput {
-                source: InvalidInputError {
-                    message: "invalid params provided".to_string(),
-                    source: None,
-                    backtrace: None,
-                },
-            },
+            FSRSError::InvalidInput => AnkiError::FsrsParamsInvalid,
             FSRSError::InvalidDeckSize => AnkiError::InvalidInput {
                 source: InvalidInputError {
                     message: "no cards to simulate".to_string(),
@@ -136,6 +136,19 @@ impl Collection {
                 let deckconfig_id = deck.config_id().unwrap();
                 // reschedule it
                 let original_interval = card.interval;
+                let min_interval = |interval: u32| {
+                    let previous_interval =
+                        last_info.previous_interval.unwrap_or(0);
+                    if interval > previous_interval {
+                        // interval grew; don't allow fuzzed interval to
+                        // be less than previous+1
+                        previous_interval + 1
+                    } else {
+                        // interval shrunk; don't restrict negative fuzz
+                        0
+                    }
+                    .max(1)
+                };
                 let interval = fsrs.next_interval(
                     Some(state.stability),
                     desired_retention,

@@ -146,7 +159,7 @@ impl Collection {
                     .and_then(|r| {
                         r.find_interval(
                             interval,
-                            1,
+                            min_interval(interval as u32),
                             req.max_interval,
                             days_elapsed as u32,
                             deckconfig_id,

@@ -157,7 +170,7 @@ impl Collection {
                     with_review_fuzz(
                         card.get_fuzz_factor(true),
                         interval,
-                        1,
+                        min_interval(interval as u32),
                         req.max_interval,
                     )
                 });
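[Reviewer sketch — not part of the diff. The closure above in standalone form,
with invented values: if the previous interval was 10 days and the new one is
15, fuzz may not drop the result below 11; if the interval shrank to 7, the
floor is just 1.]

    fn min_interval(interval: u32, previous_interval: u32) -> u32 {
        if interval > previous_interval {
            previous_interval + 1 // grew: keep fuzz above the old interval
        } else {
            0 // shrank: negative fuzz is unrestricted
        }
        .max(1)
    }

    fn main() {
        assert_eq!(min_interval(15, 10), 11);
        assert_eq!(min_interval(7, 10), 1);
    }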
@@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo {
     /// reviewed the card and now, so that we can determine an accurate period
     /// when the card has subsequently been rescheduled to a different day.
     pub(crate) last_reviewed_at: Option<TimestampSecs>,
+    /// The interval before the latest review. Used to prevent fuzz from going
+    /// backwards when rescheduling the card
+    pub(crate) previous_interval: Option<u32>,
 }
 
 /// Return a map of cards to info about last review.

@@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, L
         .into_iter()
         .for_each(|(card_id, group)| {
             let mut last_reviewed_at = None;
+            let mut previous_interval = None;
             for e in group.into_iter() {
                 if e.has_rating_and_affects_scheduling() {
                     last_reviewed_at = Some(e.id.as_secs());
+                    previous_interval = if e.last_interval >= 0 && e.button_chosen > 1 {
+                        Some(e.last_interval as u32)
+                    } else {
+                        None
+                    };
                 } else if e.is_reset() {
                     last_reviewed_at = None;
+                    previous_interval = None;
                 }
             }
-            out.insert(card_id, LastRevlogInfo { last_reviewed_at });
+            out.insert(
+                card_id,
+                LastRevlogInfo {
+                    last_reviewed_at,
+                    previous_interval,
+                },
+            );
         });
     out
 }
@@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs(
         }))
         .collect_vec();
 
-    let skip = if training { 1 } else { 0 };
-    // Convert the remaining entries into separate FSRSItems, where each item
-    // contains all reviews done until then.
-    let items: Vec<(RevlogId, FSRSItem)> = entries
-        .iter()
-        .enumerate()
-        .skip(skip)
-        .map(|(outer_idx, entry)| {
-            let reviews = entries
-                .iter()
-                .take(outer_idx + 1)
-                .enumerate()
-                .map(|(inner_idx, r)| FSRSReview {
-                    rating: r.button_chosen as u32,
-                    delta_t: delta_ts[inner_idx],
-                })
-                .collect();
-            (entry.id, FSRSItem { reviews })
-        })
-        .filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0)
-        .collect_vec();
+    let items = if training {
+        // Convert the remaining entries into separate FSRSItems, where each item
+        // contains all reviews done until then.
+        let mut items = Vec::with_capacity(entries.len());
+        let mut current_reviews = Vec::with_capacity(entries.len());
+        for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() {
+            current_reviews.push(FSRSReview {
+                rating: entry.button_chosen as u32,
+                delta_t,
+            });
+            if idx >= 1 && delta_t > 0 {
+                items.push((
+                    entry.id,
+                    FSRSItem {
+                        reviews: current_reviews.clone(),
+                    },
+                ));
+            }
+        }
+        items
+    } else {
+        // When not training, we only need the final FSRS item, which represents
+        // the complete history of the card. This avoids expensive clones in a loop.
+        let reviews = entries
+            .iter()
+            .zip(delta_ts.iter())
+            .map(|(entry, &delta_t)| FSRSReview {
+                rating: entry.button_chosen as u32,
+                delta_t,
+            })
+            .collect();
+        let last_entry = entries.last().unwrap();
+
+        vec![(last_entry.id, FSRSItem { reviews })]
+    };
+
     if items.is_empty() {
         None
     } else {
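[Reviewer sketch — not part of the diff. The rewrite replaces a
rebuild-from-scratch inner loop (quadratic in review count) with one pass that
extends a running review list and clones it only when an item is emitted;
types simplified here:]

    #[derive(Clone, Debug, PartialEq)]
    struct Review {
        rating: u32,
        delta_t: u32,
    }

    // Emit a snapshot whenever a review is not the first entry and fell on a
    // later day (delta_t > 0), mirroring the training branch above.
    fn training_items(reviews: &[Review]) -> Vec<Vec<Review>> {
        let mut items = Vec::new();
        let mut current = Vec::with_capacity(reviews.len());
        for (idx, r) in reviews.iter().enumerate() {
            current.push(r.clone());
            if idx >= 1 && r.delta_t > 0 {
                items.push(current.clone());
            }
        }
        items
    }

    fn main() {
        let revs = [Review { rating: 3, delta_t: 0 }, Review { rating: 4, delta_t: 2 }];
        assert_eq!(training_items(&revs).len(), 1); // only the second review qualifies
    }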
@@ -738,7 +753,7 @@ pub(crate) mod tests {
                 ],
                 false,
             ),
-            fsrs_items!([review(0)], [review(0), review(1)])
+            fsrs_items!([review(0), review(1)])
         );
     }

@@ -809,7 +824,7 @@ pub(crate) mod tests {
         // R | A X R
         assert_eq!(
             convert_ignore_before(revlogs, false, days_ago_ms(9)),
-            fsrs_items!([review(0)], [review(0), review(2)])
+            fsrs_items!([review(0), review(2)])
         );
     }

@@ -828,6 +843,9 @@ pub(crate) mod tests {
         assert_eq!(
             convert_ignore_before(revlogs, false, days_ago_ms(9))
                 .unwrap()
+                .last()
+                .unwrap()
+                .reviews
                 .len(),
             2
         );

@@ -849,6 +867,9 @@ pub(crate) mod tests {
         assert_eq!(
             convert_ignore_before(revlogs, false, days_ago_ms(9))
                 .unwrap()
+                .last()
+                .unwrap()
+                .reviews
                 .len(),
             2
         );
@@ -115,13 +115,14 @@ impl Rescheduler {
     pub fn find_interval(
         &self,
         interval: f32,
-        minimum: u32,
-        maximum: u32,
+        minimum_interval: u32,
+        maximum_interval: u32,
         days_elapsed: u32,
         deckconfig_id: DeckConfigId,
         fuzz_seed: Option<u64>,
     ) -> Option<u32> {
-        let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum);
+        let (before_days, after_days) =
+            constrained_fuzz_bounds(interval, minimum_interval, maximum_interval);
 
         // Don't reschedule the card when it's overdue
         if after_days < days_elapsed {
@@ -392,6 +392,11 @@ fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> {
             tag: unescape_quotes(re),
             mode: FieldSearchMode::Regex,
         }
+    } else if let Some(nc) = s.strip_prefix("nc:") {
+        SearchNode::Tag {
+            tag: unescape(nc)?,
+            mode: FieldSearchMode::NoCombining,
+        }
     } else {
         SearchNode::Tag {
             tag: unescape(s)?,
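[Reviewer note — this gives tag searches the same "nc:" prefix that field
searches already support. Example query (tag name invented): tag:nc:lecon
should now also match a note tagged léçon, since combining characters are
stripped from both the query and the stored tags before comparison.]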
@@ -311,8 +311,19 @@ impl SqlWriter<'_> {
             }
             s if s.contains(' ') => write!(self.sql, "false").unwrap(),
             text => {
-                write!(self.sql, "n.tags regexp ?").unwrap();
-                let re = &to_custom_re(text, r"\S");
+                let text = if mode == FieldSearchMode::Normal {
+                    write!(self.sql, "n.tags regexp ?").unwrap();
+                    Cow::from(text)
+                } else {
+                    write!(
+                        self.sql,
+                        "coalesce(process_text(n.tags, {}), n.tags) regexp ?",
+                        ProcessTextFlags::NoCombining.bits()
+                    )
+                    .unwrap();
+                    without_combining(text)
+                };
+                let re = &to_custom_re(&text, r"\S");
                 self.args.push(format!("(?i).* {re}(::| ).*"));
             }
         }
@@ -76,8 +76,15 @@ impl Collection {
             note_id: card.note_id.into(),
             deck: deck.human_name(),
             added: card.id.as_secs().0,
-            first_review: revlog.first().map(|entry| entry.id.as_secs().0),
-            latest_review: revlog.last().map(|entry| entry.id.as_secs().0),
+            first_review: revlog
+                .iter()
+                .find(|entry| entry.has_rating())
+                .map(|entry| entry.id.as_secs().0),
+            // last_review_time is not used to ensure cram revlogs are included.
+            latest_review: revlog
+                .iter()
+                .rfind(|entry| entry.has_rating())
+                .map(|entry| entry.id.as_secs().0),
             due_date: self.due_date(&card)?,
             due_position: self.position(&card),
             interval: card.interval,

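Note: the change above swaps first()/last() for find()/rfind() with a predicate,
so revlog entries without a rating no longer count as reviews. A self-contained
sketch of the pattern; `Entry` and `has_rating` here are stand-ins for the real
revlog types:

    struct Entry {
        id: i64,
        button_chosen: u8,
    }

    impl Entry {
        fn has_rating(&self) -> bool {
            self.button_chosen > 0
        }
    }

    // First and latest entry that passes the predicate, mapped to its id.
    fn first_and_latest(revlog: &[Entry]) -> (Option<i64>, Option<i64>) {
        (
            revlog.iter().find(|e| e.has_rating()).map(|e| e.id),
            revlog.iter().rfind(|e| e.has_rating()).map(|e| e.id),
        )
    }
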
@@ -220,6 +227,7 @@ fn stats_revlog_entry(
         ease: entry.ease_factor,
         taken_secs: entry.taken_millis as f32 / 1000.,
         memory_state: None,
+        last_interval: entry.last_interval_secs(),
     }
 }

@@ -18,6 +18,20 @@ use crate::prelude::*;
 
 pub mod changetracker;
 
+pub struct Checksums(HashMap<String, Sha1Hash>);
+
+impl Checksums {
+    // case-fold filenames when checking files to be imported
+    // to account for case-insensitive filesystems
+    pub fn get(&self, key: impl AsRef<str>) -> Option<&Sha1Hash> {
+        self.0.get(key.as_ref().to_lowercase().as_str())
+    }
+
+    pub fn contains_key(&self, key: impl AsRef<str>) -> bool {
+        self.get(key).is_some()
+    }
+}
+
 #[derive(Debug, PartialEq, Eq)]
 pub struct MediaEntry {
     pub fname: String,

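Note: the wrapper makes lookups case-insensitive by lowercasing the query. A
self-contained usage sketch, assuming the map's keys are stored lowercased when
it is built:

    use std::collections::HashMap;

    type Sha1Hash = [u8; 20];

    struct Checksums(HashMap<String, Sha1Hash>);

    impl Checksums {
        fn get(&self, key: impl AsRef<str>) -> Option<&Sha1Hash> {
            self.0.get(key.as_ref().to_lowercase().as_str())
        }
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert("photo.jpg".to_string(), [0u8; 20]);
        let sums = Checksums(map);
        // A case-insensitive filesystem may hand us "Photo.JPG" for the same file:
        assert!(sums.get("Photo.JPG").is_some());
    }
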
@@ -175,11 +189,12 @@ delete from media where fname=?",
     }
 
     /// Returns all filenames and checksums, where the checksum is not null.
-    pub(crate) fn all_registered_checksums(&self) -> error::Result<HashMap<String, Sha1Hash>> {
+    pub(crate) fn all_registered_checksums(&self) -> error::Result<Checksums> {
         self.db
             .prepare("SELECT fname, csum FROM media WHERE csum IS NOT NULL")?
             .query_and_then([], row_to_name_and_checksum)?
-            .collect()
+            .collect::<error::Result<_>>()
+            .map(Checksums)
     }
 
     pub(crate) fn force_resync(&self) -> error::Result<()> {

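Note: the collect-then-map line relies on a standard idiom: an iterator of
Result items can be collected directly into a Result of a collection, stopping
at the first error, after which the Ok value is wrapped in the newtype. A
minimal stand-alone sketch:

    use std::collections::HashMap;

    fn gather(rows: Vec<Result<(String, u32), String>>) -> Result<HashMap<String, u32>, String> {
        // Fails fast on the first Err; otherwise builds the whole map.
        rows.into_iter().collect()
    }

    fn main() {
        let ok = gather(vec![Ok(("a.png".into(), 1)), Ok(("b.png".into(), 2))]);
        assert!(ok.is_ok());
        let err = gather(vec![Ok(("a.png".into(), 1)), Err("bad row".into())]);
        assert_eq!(err, Err("bad row".to_string()));
    }
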
@@ -58,7 +58,7 @@ trait DiffTrait {
         if self.get_typed() == self.get_expected() {
             format_typeans!(format!(
                 "<span class=typeGood>{}</span>",
-                self.get_expected_original()
+                htmlescape::encode_minimal(&self.get_expected_original())
             ))
         } else {
             let output = self.to_tokens();

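Note: `encode_minimal` from the htmlescape crate replaces the characters that
would otherwise be parsed as markup (at least &, <, and >) with entities, so a
correctly typed answer containing angle brackets is displayed rather than
interpreted. The test added in the next hunk exercises exactly this. A one-line
sketch of the behaviour relied on:

    fn main() {
        assert_eq!(htmlescape::encode_minimal("<dir> & co"), "&lt;dir&gt; &amp; co");
    }
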
@@ -391,6 +391,15 @@ mod test {
         assert_eq!(ctx, "<code id=typeans>123</code>");
     }
 
+    #[test]
+    fn correct_input_is_escaped() {
+        let ctx = Diff::new("source <dir>/bin/activate", "source <dir>/bin/activate");
+        assert_eq!(
+            ctx.to_html(),
+            "<code id=typeans><span class=typeGood>source &lt;dir&gt;/bin/activate</span></code>"
+        );
+    }
+
     #[test]
     fn correct_input_is_collapsed() {
         let ctx = Diff::new("123", "123");

@@ -255,9 +255,7 @@ fn check_for_unstaged_changes() {
 }
 
 fn generate_licences() -> Result<String> {
-    if which::which("cargo-license").is_err() {
-        Command::run("cargo install cargo-license@0.5.1")?;
-    }
+    Command::run("cargo install cargo-license@0.7.0")?;
     let output = Command::run_with_output([
         "cargo-license",
         "--features",

@@ -5,8 +5,6 @@ import os
 import sys
 
 sys.path.extend(["pylib", "qt", "out/pylib", "out/qt"])
-if sys.platform == "win32":
-    os.environ["PATH"] += ";out\\extracted\\win_amd64_audio"
 
 import aqt

@@ -31,7 +31,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 </script>
 
 <script lang="ts">
-    import { randomUUID } from "@tslib/uuid";
     import { onDestroy } from "svelte";
     import { writable } from "svelte/store";

@@ -66,7 +65,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     $: empty = title === "MathJax";
     $: encoded = encodeURIComponent(converted);
 
-    const uuid = randomUUID();
+    const uuid = crypto.randomUUID();
     const imageHeight = writable(0);
     imageToHeightMap.set(uuid, imageHeight);

@@ -23,6 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
     export let title: string;
     export let url: string;
+    export let linkLabel: string | undefined = undefined;
     export let startIndex = 0;
     export let helpSections: HelpItem[];
     export let fsrs = false;

@@ -106,11 +107,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     <div class="chapter-redirect">
         {@html renderMarkdown(
             tr.helpForMoreInfo({
-                link: `<a href="${url}" title="${tr.helpOpenManualChapter(
-                    {
-                        name: title,
-                    },
-                )}">${title}</a>`,
+                link: `<a href="${url}" title="${tr.helpOpenManualChapter({ name: linkLabel ?? title })}">${linkLabel ?? title}</a>`,
             }),
         )}
     </div>

@@ -27,7 +27,8 @@ export const HelpPage = {
         limitsFromTop: "https://docs.ankiweb.net/deck-options.html#limits-start-from-top",
         dailyLimits: "https://docs.ankiweb.net/deck-options.html#daily-limits",
         audio: "https://docs.ankiweb.net/deck-options.html#audio",
-        fsrs: "http://docs.ankiweb.net/deck-options.html#fsrs",
+        fsrs: "https://docs.ankiweb.net/deck-options.html#fsrs",
+        desiredRetention: "https://docs.ankiweb.net/deck-options.html#desired-retention",
     },
     Leeches: {
         leeches: "https://docs.ankiweb.net/leeches.html#leeches",

@@ -1,16 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-/**
- * TODO replace with crypto.randomUUID
- */
-export function randomUUID(): string {
-    const value = `${1e7}-${1e3}-${4e3}-${8e3}-${1e11}`;
-
-    return value.replace(/[018]/g, (character: string): string =>
-        (
-            Number(character)
-            ^ (crypto.getRandomValues(new Uint8Array(1))[0]
-                & (15 >> (Number(character) / 4)))
-        ).toString(16));
-}

@@ -72,7 +72,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 >
     <HelpModal
         title={tr.statisticsTrueRetentionTitle()}
-        url={HelpPage.DeckOptions.fsrs}
+        url={HelpPage.DeckOptions.desiredRetention}
+        linkLabel={tr.deckConfigDesiredRetention()}
         {helpSections}
         on:mount={(e) => {
             modal = e.detail.modal;

@@ -32,6 +32,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     saveNeededStore,
     opacityStateStore,
 } from "./store";
+import { get } from "svelte/store";
 import { drawEllipse, drawPolygon, drawRectangle, drawText } from "./tools/index";
 import { makeMaskTransparent, SHAPE_MASK_COLOR } from "./tools/lib";
 import { enableSelectable, stopDraw } from "./tools/lib";

@@ -55,6 +56,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     onWheelDragX,
 } from "./tools/tool-zoom";
 import { fillMask } from "./tools/tool-fill";
+import { getCustomColours, saveCustomColours } from "@generated/backend";
 
 export let canvas;
 export let iconSize;

@@ -76,6 +78,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     let colourRef: HTMLInputElement | undefined;
     const colour = writable(SHAPE_MASK_COLOR);
 
+    const customColorPickerPalette = writable<string[]>([]);
+
+    async function loadCustomColours() {
+        customColorPickerPalette.set(
+            (await getCustomColours({})).colours.filter(
+                (hex) => !hex.startsWith("#ffffff"),
+            ),
+        );
+    }
+
     function onClick(event: MouseEvent) {
         const upperCanvas = document.querySelector(".upper-canvas");
         if (event.target == upperCanvas) {

@@ -222,7 +234,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     }
 
     onMount(() => {
-        opacityStateStore.set(maskOpacity);
+        maskOpacity = get(opacityStateStore);
         removeHandlers = singleCallback(
             on(document, "click", onClick),
             on(window, "mousemove", onMousemove),

@@ -233,6 +245,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
             on(document, "touchstart", onTouchstart),
             on(document, "mousemove", onMousemoveDocument),
         );
+        loadCustomColours();
     });
 
     onDestroy(() => {

@@ -241,7 +254,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 </script>
 
 <datalist id="colour-palette">
-    <option value={SHAPE_MASK_COLOR}></option>
+    <option>{SHAPE_MASK_COLOR}</option>
+    {#each $customColorPickerPalette as colour}
+        <option>{colour}</option>
+    {/each}
 </datalist>
 
 <input

@@ -251,6 +267,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     list="colour-palette"
     value={SHAPE_MASK_COLOR}
     on:input={(e) => ($colour = e.currentTarget!.value)}
+    on:change={() => saveCustomColours({})}
 />
 
 <div class="tool-bar-container" style:--fill-tool-colour={$colour}>

@@ -8,10 +8,22 @@ import { fabric } from "fabric";
 import { get } from "svelte/store";
 
 import { optimumCssSizeForCanvas } from "./canvas-scale";
-import { hideAllGuessOne, notesDataStore, saveNeededStore, tagsWritable, textEditingState } from "./store";
+import {
+    hideAllGuessOne,
+    notesDataStore,
+    opacityStateStore,
+    saveNeededStore,
+    tagsWritable,
+    textEditingState,
+} from "./store";
 import Toast from "./Toast.svelte";
 import { addShapesToCanvasFromCloze } from "./tools/add-from-cloze";
-import { enableSelectable, makeShapesRemainInCanvas, moveShapeToCanvasBoundaries } from "./tools/lib";
+import {
+    enableSelectable,
+    makeMaskTransparent,
+    makeShapesRemainInCanvas,
+    moveShapeToCanvasBoundaries,
+} from "./tools/lib";
 import { modifiedPolygon } from "./tools/tool-polygon";
 import { undoStack } from "./tools/tool-undo-redo";
 import { enablePinchZoom, onResize, setCanvasSize } from "./tools/tool-zoom";

@@ -83,6 +95,7 @@ export const setupMaskEditorForEdit = async (
         window.requestAnimationFrame(() => {
             onImageLoaded({ noteId: BigInt(noteId) });
         });
+        if (get(opacityStateStore)) { makeMaskTransparent(canvas, true); }
     };
 
     return canvas;

@@ -6939,8 +6939,8 @@ __metadata:
   linkType: hard
 
 "vite@npm:6":
-  version: 6.3.6
-  resolution: "vite@npm:6.3.6"
+  version: 6.4.1
+  resolution: "vite@npm:6.4.1"
   dependencies:
     esbuild: "npm:^0.25.0"
     fdir: "npm:^6.4.4"

@@ -6989,7 +6989,7 @@ __metadata:
     optional: true
   bin:
     vite: bin/vite.js
-  checksum: 10c0/add701f1e72596c002275782e38d0389ab400c1be330c93a3009804d62db68097a936ca1c53c3301df3aaacfe5e328eab547060f31ef9c49a277ae50df6ad4fb
+  checksum: 10c0/77bb4c5b10f2a185e7859cc9a81c789021bc18009b02900347d1583b453b58e4b19ff07a5e5a5b522b68fc88728460bb45a63b104d969e8c6a6152aea3b849f7
   languageName: node
   linkType: hard