Garbage collect unused Fluent strings (#1482)

* Canonify import of i18n module

The i18n module should always be imported as `tr`, or as `tr2` if there is a
name collision (as in Svelte files).
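
A minimal Rust sketch of the convention, mirroring the call sites changed
further down in this diff (the `answer_label` helper is made up for
illustration; `I18n` and `browsing_answer()` come from the generated i18n
crate):

    use anki_i18n::I18n;

    // The translation handle is always bound to `tr` (never `i18n` or `translate`).
    fn answer_label(tr: &I18n) -> String {
        tr.browsing_answer().into()
    }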

* Add helper for garbage collecting ftl strings

Also add a serializer for ftl asts.
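
In rough terms, the garbage collector parses each `.ftl` file, drops every
message or term whose name is not referenced anywhere, and writes the result
back using the new serializer. A condensed sketch of that round trip (error
handling via `expect`, as in the real code; the full implementation is in
`garbage_collection.rs` below, which additionally skips the rewrite when
nothing was removed):

    use std::{collections::HashSet, fs};

    use anki_i18n_helpers::serialize;
    use fluent_syntax::{ast, parser};

    /// Keep only the messages/terms whose names appear in `used`, then
    /// rewrite the file with the serializer added in this commit.
    fn prune_ftl_file(path: &str, used: &HashSet<String>) {
        let source = fs::read_to_string(path).expect("file not readable");
        let mut resource = parser::parse(source.as_str()).expect("failed to parse ftl");
        resource.body.retain(|entry| match entry {
            ast::Entry::Message(msg) => used.contains(msg.id.name),
            ast::Entry::Term(term) => used.contains(term.id.name),
            _ => true,
        });
        fs::write(path, serialize::serialize(&resource)).expect("failed to write file");
    }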

* Add helper for filter-mapping `DirEntry`s

* Fix `i18n_helpers/BUILD.bazel`

* Run cargo-raze

* Refactor `garbage_collection.rs`

- Improve helper for iterating over files
- Remove unused terms as well
- Fix issue with detecting nested messages by switching to a regex-based
approach, which runs before any deletion (see the sketch after this list)
- Some more refactorings and lint fixes
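
The regex pass mentioned above, pulled out as a standalone sketch (the pattern
is the same one used in `garbage_collection.rs`; it records anything referenced
from inside another message or term, e.g. `{ other-message }` or
`{ -some-term }`, as used before any deletion happens):

    use std::collections::HashSet;

    use lazy_static::lazy_static;
    use regex::Regex;

    fn collect_nested_references(ftl_source: &str, used: &mut HashSet<String>) {
        lazy_static! {
            // Matches `{ message-id }` and `{ -term-id }` placeables.
            static ref REFERENCE: Regex = Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap();
        }
        for caps in REFERENCE.captures_iter(ftl_source) {
            used.insert(caps[1].to_string());
        }
    }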

* Fix lints in `serialize.rs`

* Write json pretty-printed and sorted

* Update `serialize.rs` and fix header

* Fix doc and remove `dbg!`

* Add binaries for ftl garbage collection

Also relax type constraints and strip debug tests.

* add rust_binary targets for i18n helpers (dae)

* add scripts to update desktop usage/garbage collect (dae)

Since we've already diverged from 2.1.49, we won't gain anything
from generating a stable json just yet. But once 2.1.50 is released,
we should run 'ftl/update-desktop-usage.sh stable'.

* add keys from AnkiMobile (dae)

* Mention caveats in `remove-unused.sh`
Authored by RumovZ on 2021-11-12 09:19:01 +01:00; committed by GitHub
parent d668ac8f7f
commit 0efa3f944f
25 changed files with 2158 additions and 55 deletions

Cargo.lock (generated)

@ -120,6 +120,17 @@ dependencies = [
"unic-langid",
]
[[package]]
name = "anki_i18n_helpers"
version = "0.0.0"
dependencies = [
"fluent-syntax",
"lazy_static",
"regex",
"serde_json",
"walkdir",
]
[[package]]
name = "anki_workspace"
version = "0.0.0"
@ -2014,6 +2025,15 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "schannel"
version = "0.1.19"
@ -2825,6 +2845,17 @@ version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
[[package]]
name = "walkdir"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
dependencies = [
"same-file",
"winapi",
"winapi-util",
]
[[package]]
name = "want"
version = "0.3.0"


@ -5,7 +5,7 @@ authors = ["Ankitects Pty Ltd and contributors"]
license = "AGPL-3.0-or-later"
[workspace]
members = ["rslib", "rslib/i18n", "pylib/rsbridge"]
members = ["rslib", "rslib/i18n", "rslib/i18n_helpers", "pylib/rsbridge"]
exclude = ["qt/package"]
[lib]


@ -534,6 +534,15 @@ alias(
],
)
alias(
name = "walkdir",
actual = "@raze__walkdir__2_3_2//:walkdir",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "zip",
actual = "@raze__zip__0_5_13//:zip",


@ -1981,6 +1981,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.ryu-1.0.5.bazel"),
)
maybe(
http_archive,
name = "raze__same_file__1_0_6",
url = "https://crates.io/api/v1/crates/same-file/1.0.6/download",
type = "tar.gz",
sha256 = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502",
strip_prefix = "same-file-1.0.6",
build_file = Label("//cargo/remote:BUILD.same-file-1.0.6.bazel"),
)
maybe(
http_archive,
name = "raze__schannel__0_1_19",
@ -2851,6 +2861,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.version_check-0.9.3.bazel"),
)
maybe(
http_archive,
name = "raze__walkdir__2_3_2",
url = "https://crates.io/api/v1/crates/walkdir/2.3.2/download",
type = "tar.gz",
sha256 = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56",
strip_prefix = "walkdir-2.3.2",
build_file = Label("//cargo/remote:BUILD.walkdir-2.3.2.bazel"),
)
maybe(
http_archive,
name = "raze__want__0_3_0",


@ -0,0 +1,67 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"unencumbered", # Unlicense from expression "Unlicense OR MIT"
])
# Generated Targets
# Unsupported target "is_same_file" with type "example" omitted
# Unsupported target "is_stderr" with type "example" omitted
rust_library(
name = "same_file",
srcs = glob(["**/*.rs"]),
aliases = {
},
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"manual",
],
version = "1.0.6",
# buildifier: leave-alone
deps = [
] + selects.with_or({
# cfg(windows)
(
"@rules_rust//rust/platform:x86_64-pc-windows-msvc",
): [
"@raze__winapi_util__0_1_5//:winapi_util",
],
"//conditions:default": [],
}),
)

cargo/remote/BUILD.walkdir-2.3.2.bazel (vendored)

@ -0,0 +1,65 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"unencumbered", # Unlicense from expression "Unlicense OR MIT"
])
# Generated Targets
rust_library(
name = "walkdir",
srcs = glob(["**/*.rs"]),
aliases = {
},
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"manual",
],
version = "2.3.2",
# buildifier: leave-alone
deps = [
"@raze__same_file__1_0_6//:same_file",
] + selects.with_or({
# cfg(windows)
(
"@rules_rust//rust/platform:x86_64-pc-windows-msvc",
): [
"@raze__winapi__0_3_9//:winapi",
"@raze__winapi_util__0_1_5//:winapi_util",
],
"//conditions:default": [],
}),
)

ftl/remove-unused.sh (new executable file)

@ -0,0 +1,18 @@
#!/bin/bash
# Caveats:
# - Messages are considered in use if they are referenced in other messages,
# even if those messages themselves are not in use and going to be deleted.
# - Usually, if there is a bug and a message fails to be recognised as in
# use, building will fail. However, this is not true for nested messages, for
# which only a runtime error will be printed.
set -e
root=$(realpath $(dirname $0)/..)
# update currently used keys
./update-desktop-usage.sh head
# then remove unused keys
bazel run //rslib/i18n_helpers:garbage_collect_ftl_entries $root/ftl $root/ftl/usage

ftl/update-ankimobile-usage.sh (new executable file)

@ -0,0 +1,13 @@
#!/bin/bash
#
# This script can only be run by Damien, as it requires a copy of AnkiMobile's sources.
# A similar script could be added for AnkiDroid in the future.
#
set -e
scriptRoot=$(realpath $(dirname $0)/..)
sourceRoot=$(realpath $scriptRoot/../mob/src)
bazel run //rslib/i18n_helpers:write_ftl_json $scriptRoot/ftl/usage/ankimobile.json \
$sourceRoot

ftl/update-desktop-usage.sh (new executable file)

@ -0,0 +1,9 @@
#!/bin/bash
set -e
version=$1
root=$(realpath $(dirname $0)/..)
bazel run //rslib/i18n_helpers:write_ftl_json $root/ftl/usage/desktop-$version.json \
$root/{rslib,ts,pylib,qt}

ftl/usage/.gitignore (vendored)

@ -0,0 +1 @@
desktop-head.json

ftl/usage/ankimobile.json (new file)

@ -0,0 +1,574 @@
[
"about-anki-written-by",
"about-license-text",
"about-please-see",
"about-some-thirdparty-libraries",
"about-thanks-contributors",
"about-thanks-for-support",
"about-the-anki-homepage",
"actions-actions",
"actions-add",
"actions-add-new",
"actions-add-short",
"actions-added-to-frequent-actions",
"actions-all-actions",
"actions-already-in-frequent-actions",
"actions-an-error-occurred",
"actions-answer-again",
"actions-answer-again-short",
"actions-answer-easy",
"actions-answer-easy-short",
"actions-answer-good",
"actions-answer-good-short",
"actions-answer-hard",
"actions-answer-hard-short",
"actions-are-you-sure",
"actions-auto-advance",
"actions-auto-advance-short",
"actions-bottom-bar",
"actions-bottom-bar-short",
"actions-browse",
"actions-browse-short",
"actions-bury-card-short",
"actions-bury-note-short",
"actions-cancel",
"actions-card-info",
"actions-card-info-short",
"actions-card-template",
"actions-card-template-short",
"actions-close",
"actions-confirm-delete",
"actions-current-audio-minus5s",
"actions-current-audio-minus5s-short",
"actions-current-audio-plus5s",
"actions-current-audio-plus5s-short",
"actions-custom-study",
"actions-custom-study-cant-be-used-on",
"actions-custom-study-short",
"actions-deck-statistics",
"actions-deck-statistics-short",
"actions-decks",
"actions-decks-short",
"actions-delete",
"actions-delete-note-short",
"actions-discard-changes",
"actions-done",
"actions-downloading",
"actions-drag-here-to-remove",
"actions-draw",
"actions-edit-short",
"actions-empty-short",
"actions-export",
"actions-file-invalid-or-corrupt",
"actions-filter",
"actions-filter-short",
"actions-filtercram",
"actions-flag-blue",
"actions-flag-green",
"actions-flag-number",
"actions-flag-orange",
"actions-flag-pink",
"actions-flag-purple",
"actions-flag-red",
"actions-flag-turquoise",
"actions-frequent-actions",
"actions-import",
"actions-leave-without-saving",
"actions-long-press-on-an-item-to",
"actions-mark",
"actions-mark-and-bury",
"actions-mark-and-bury-short",
"actions-mark-and-suspend",
"actions-mark-and-suspend-short",
"actions-mark-short",
"actions-new-name",
"actions-night-mode-short",
"actions-no-current-card",
"actions-no-load-restore-backup",
"actions-not-valid-link",
"actions-nothing-to-redo",
"actions-nothing-to-undo",
"actions-off",
"actions-off-short",
"actions-options",
"actions-options-for",
"actions-pause-audio-short",
"actions-please-tap-the-in-the",
"actions-preview",
"actions-processing",
"actions-rebuild",
"actions-rebuild-short",
"actions-record-voice",
"actions-record-voice-menu",
"actions-record-voice-short",
"actions-redo-short",
"actions-rename",
"actions-rename-deck",
"actions-replay-audio",
"actions-replay-audio-short",
"actions-replay-voice-short",
"actions-reset-card",
"actions-reset-card-short",
"actions-revert",
"actions-review-undone",
"actions-save",
"actions-scratchpad",
"actions-scratchpad-short",
"actions-scratchpad-size",
"actions-scratchpad-size-short",
"actions-search",
"actions-select-deck",
"actions-select-note-type",
"actions-set-due-date",
"actions-set-due-date-short",
"actions-show-answer-short",
"actions-show-answeranswer-good",
"actions-show-answeranswer-good-short",
"actions-study-options",
"actions-study-options-short",
"actions-suspend-card",
"actions-suspend-card-short",
"actions-suspend-note-short",
"actions-tools",
"actions-tools-overlay",
"actions-tools-short",
"actions-top-bar",
"actions-top-bar-short",
"actions-unbury-deck",
"actions-unbury-deck-short",
"actions-undo",
"actions-undo-short",
"adding-added",
"adding-cloze-outside-cloze-field",
"adding-cloze-outside-cloze-notetype",
"adding-the-first-field-is-empty",
"adding-you-have-a-cloze-deletion-note",
"browsing-added-today",
"browsing-again-today",
"browsing-any-flag",
"browsing-append",
"browsing-append-negated",
"browsing-card-updated",
"browsing-cards-deleted",
"browsing-cards-updated",
"browsing-change-deck",
"browsing-change-flag",
"browsing-change-note-type",
"browsing-change-notetype",
"browsing-clear-flag",
"browsing-clear-unused-tags",
"browsing-column1",
"browsing-column2",
"browsing-confirm-reset",
"browsing-current-deck",
"browsing-dd-selected",
"browsing-delete-notes",
"browsing-due-dateorder",
"browsing-due-reviews",
"browsing-ease",
"browsing-filtered",
"browsing-find",
"browsing-find-and-replace",
"browsing-interval",
"browsing-learning-cards",
"browsing-no-cards-are-selected",
"browsing-no-flag",
"browsing-note-created",
"browsing-note-updated",
"browsing-note2",
"browsing-notes-updated",
"browsing-question",
"browsing-questionandanswer",
"browsing-removed-unused-tags-count",
"browsing-repetitions",
"browsing-replace-with",
"browsing-reschedule",
"browsing-reset-cards",
"browsing-row-deleted",
"browsing-searching",
"browsing-second-column19",
"browsing-second-column91",
"browsing-select-all",
"browsing-sort",
"browsing-sort-field",
"browsing-sort-order",
"browsing-studied-today",
"browsing-suspended",
"browsing-tag",
"browsing-toggle-suspend",
"browsing-whole-collection",
"card-stats-note-type",
"card-templates-flip",
"card-templates-night-mode",
"card-templates-template-styling",
"custom-study-any-tag",
"custom-study-available",
"custom-study-cant-extend-limits-no-extra",
"custom-study-cram-seen-cards-with-certain-tags",
"custom-study-custom-study-session",
"custom-study-days-to-look-ahead",
"custom-study-days-to-look-back",
"custom-study-exclude-cards-with-tag",
"custom-study-extra-new-cards",
"custom-study-extra-review-cards",
"custom-study-include-cards-with-tag",
"custom-study-increase-todays-new-card-limit",
"custom-study-increase-todays-review-card-limit",
"custom-study-learn-new-cards-with-certain-tags",
"custom-study-loading",
"custom-study-max-cards-to-gather",
"custom-study-no-tags",
"custom-study-no-tags-available",
"custom-study-ok",
"custom-study-preview-all-cards-with-certain-tags",
"custom-study-preview-new-cards",
"custom-study-review-ahead",
"custom-study-review-due-cards-with-certain-tags",
"custom-study-review-forgotten-cards",
"custom-study-search-matches",
"custom-study-select",
"custom-study-study-type",
"custom-study-the-selected-options-did-not-match",
"database-check-rebuilt",
"database-check-title",
"deck-config-title",
"deck-config-used-by-decks",
"deck-options-add-options-group",
"deck-options-answer-time-cap",
"deck-options-bury-related-new-cards",
"deck-options-bury-related-reviews",
"deck-options-defaults",
"deck-options-delete-options-full-sync",
"deck-options-display-in-order-added",
"deck-options-display-in-random-order",
"deck-options-full-sync-required",
"deck-options-group-name",
"deck-options-max-new-per-day",
"deck-options-max-reviews-per-day",
"deck-options-new-cards",
"deck-options-options-group",
"deck-options-replay-q-audio-in-answer",
"deck-options-reset-all-settings-to-defaults",
"deck-options-restore-defaults",
"deck-options-steps",
"decks-a-deck-must-be-provided",
"decks-a-deck-named-already-exists",
"decks-add-empty-deck",
"decks-addexport",
"decks-build",
"decks-card-limit",
"decks-custom-steps",
"decks-deck",
"decks-deck-label",
"decks-deck-name",
"decks-deck-options",
"decks-download-link",
"decks-enable-second-filter",
"decks-export-collection",
"decks-filter2",
"decks-import-from-itunes",
"decks-link-to-apkg-file-to-import",
"decks-return-by-delete",
"decks-shared-deck-list",
"decks-study",
"decks-sync",
"decks-synchronize",
"decks-the-provided-deck-does-not-exist",
"decks-will-be-returned",
"editing-add-media",
"editing-bold",
"editing-cant-edit-original-image-data",
"editing-card-unsuspended",
"editing-discard",
"editing-discard-changes-question",
"editing-fields",
"editing-from-camera",
"editing-from-file",
"editing-from-photos",
"editing-italic",
"editing-keep-editing",
"editing-next-cloze",
"editing-next-field",
"editing-note-type-prompt",
"editing-note-unsuspended",
"editing-same-cloze",
"editing-tags",
"editing-unable-to-obtain-image",
"editing-unable-to-read-file",
"editing-underline",
"editing-unexpected-file-extension",
"editing-unexpected-rich-text-format-please",
"editing-unexpected-status-code",
"editing-unsuspend-card",
"editing-unsuspend-note",
"empty-cards-delete-button",
"empty-cards-delete-empty-cards",
"empty-cards-delete-empty-notes",
"empty-cards-deleted-count",
"empty-cards-deleting",
"empty-cards-not-found",
"empty-cards-window-title",
"errors100-tags-max",
"exporting-collection-saved-to-itunes",
"exporting-export-to-itunes",
"exporting-export-to-share-sheet",
"exporting-exporting",
"exporting-include-media2",
"exporting-media-files-exported-d",
"findreplace-notes-updated",
"importing-delete-imported-file",
"importing-import-complete",
"importing-importing",
"importing-no-apkg-or-colpkg-files-were",
"importing-overwrite-via-import",
"importing-please-choose-a-file",
"importing-processed-media-files-d",
"importing-replace-collection",
"media-check-check-media-action",
"media-check-delete-unused",
"media-check-delete-unused-complete",
"media-check-empty-trash",
"media-check-files-remaining",
"media-check-restore-trash",
"media-check-trash-emptied",
"media-check-trash-restored",
"media-check-window-title",
"media-error-initializing-recorder",
"media-error-playing-audio-full",
"media-privacy-microphone",
"media-recording",
"notetypes-back-field",
"notetypes-cloze-name",
"notetypes-front-field",
"preferences-about",
"preferences-always-duck-and-ignore-mute",
"preferences-answer-keeps-zoom",
"preferences-answer-side",
"preferences-audio-buttons",
"preferences-auto-advance-answer-action",
"preferences-auto-advance-answer-seconds",
"preferences-auto-advance-auto-advance-to-start",
"preferences-auto-advance-do-nothing",
"preferences-auto-advance-question-seconds",
"preferences-auto-advance-show-reminder",
"preferences-auto-advance-wait-for-audio",
"preferences-backup-available-backups",
"preferences-backup-create-now",
"preferences-backup-maximum-backups",
"preferences-backup-minutes-between-backups",
"preferences-backup-revert-to-backup",
"preferences-backup-revert-to-backup-confirm",
"preferences-backup-reverted-to-backup",
"preferences-backups",
"preferences-bottom-bar-size",
"preferences-bottom-center",
"preferences-bottom-left",
"preferences-bottom-right",
"preferences-button-d",
"preferences-collection-day-starts",
"preferences-collection-learn-ahead-minutes",
"preferences-collection-mix",
"preferences-collection-new-first",
"preferences-collection-newreview-order",
"preferences-collection-reviews-first",
"preferences-double-tap-prevention",
"preferences-drawing-screen-ignores-fingers",
"preferences-editing",
"preferences-editing-convert-smart-quotes",
"preferences-editing-crop-camera-photos",
"preferences-editing-max-image-size",
"preferences-editing-resize-on-paste",
"preferences-feedback-ticks",
"preferences-fine",
"preferences-force-sync-confirm",
"preferences-full-sync",
"preferences-gamepad-button-mapping",
"preferences-gamepad-menu-button",
"preferences-gamepads",
"preferences-height",
"preferences-huge",
"preferences-ignore-fingers",
"preferences-interrupt-current-audio",
"preferences-large",
"preferences-left",
"preferences-left-shoulder",
"preferences-left-thumbstick-button",
"preferences-left-trigger",
"preferences-logged-in-as",
"preferences-long",
"preferences-mid-center",
"preferences-mid-left",
"preferences-mid-right",
"preferences-never-show-scratchpad",
"preferences-never-show-scratchpad-enabled",
"preferences-never-type-answer",
"preferences-next-times",
"preferences-normal",
"preferences-notifications",
"preferences-notifications-alert-time",
"preferences-notifications-alert-when-due",
"preferences-notifications-app-icon-shows-due-count",
"preferences-notifications-settings-app-enable-notifications",
"preferences-paste-clipboard-images-as-png",
"preferences-pen-size",
"preferences-preferences",
"preferences-question-side",
"preferences-remaining-count",
"preferences-right",
"preferences-right-shoulder",
"preferences-right-thumbstick-button",
"preferences-right-trigger",
"preferences-scheduling",
"preferences-scratchpad-below-buttons",
"preferences-scratchpad-transparency",
"preferences-scratchpad-transparency-full",
"preferences-scratchpad-transparency-medium",
"preferences-scratchpad-transparency-none",
"preferences-scratchpad-transparency-slight",
"preferences-shake-action",
"preferences-short",
"preferences-show-bottom-bar",
"preferences-show-grid",
"preferences-show-top-bar",
"preferences-small",
"preferences-swipe-down",
"preferences-swipe-left",
"preferences-swipe-right",
"preferences-swipe-up",
"preferences-swipes",
"preferences-swipes-must-begin-from-the-far",
"preferences-sync-sounds-images",
"preferences-syncing",
"preferences-tap-to-sync",
"preferences-taps",
"preferences-theme",
"preferences-theme-bar-style",
"preferences-theme-black",
"preferences-theme-dark",
"preferences-theme-dark-translucent",
"preferences-theme-force-off",
"preferences-theme-force-on",
"preferences-theme-light-translucent",
"preferences-theme-night-mode-desc",
"preferences-theme-night-mode-same-as-system",
"preferences-theme-slate",
"preferences-thick",
"preferences-tools-overlay-button",
"preferences-tools-overlay-position",
"preferences-top-center",
"preferences-top-left",
"preferences-top-right",
"preferences-undo-clears-all",
"preferences-when-answer-shown",
"preferences-when-question-shown",
"preferences-you-have-been-logged-out",
"profiles-a-profile-with-that-name-already",
"profiles-add-profile",
"profiles-creating-backup",
"profiles-finishing-backup",
"profiles-please-provide-some-text-avoiding-symbols",
"profiles-please-select-another-profile-first",
"profiles-profile-name",
"profiles-profiles",
"profiles-rename-profile",
"profiles-unable-to-open-safari-please",
"profiles-user1",
"profiles-welcome",
"scheduling-automatically-play-audio",
"scheduling-easy-bonus",
"scheduling-easy-interval",
"scheduling-end",
"scheduling-forgot-cards",
"scheduling-general",
"scheduling-graduating-interval",
"scheduling-interval-modifier",
"scheduling-lapses",
"scheduling-leech-action",
"scheduling-leech-threshold",
"scheduling-maximum-interval",
"scheduling-minimum-interval",
"scheduling-new-cards",
"scheduling-new-interval",
"scheduling-order",
"scheduling-review",
"scheduling-reviews",
"scheduling-set-due-date-done",
"scheduling-set-due-date-prompt",
"scheduling-set-due-date-prompt-hint",
"scheduling-starting-ease",
"scheduling-steps-in-minutes",
"scheduling-tag-only",
"scheduling-update-button",
"scheduling-update-done",
"scheduling-update-later-button",
"scheduling-update-more-info-button",
"scheduling-update-soon",
"statistics-answer-buttons-title",
"statistics-reviews",
"studying-again",
"studying-answer-time-elapsed",
"studying-auto-advance-starting",
"studying-auto-advance-stopped",
"studying-bury-card",
"studying-bury-note",
"studying-card-suspended",
"studying-card-was-a-leech",
"studying-cards-buried",
"studying-delete-note",
"studying-easy",
"studying-edit",
"studying-empty",
"studying-finish",
"studying-good",
"studying-hard",
"studying-have-ready-to-study",
"studying-note-suspended",
"studying-pause-audio",
"studying-please-run-empty-cards-on-the",
"studying-please-use-record-voice-first",
"studying-replay-card",
"studying-replay-voice",
"studying-show-answer",
"studying-suspend-note",
"studying-type-in-the-answer",
"sync-abort-button",
"sync-cancel-button",
"sync-checking",
"sync-confirm-empty-download",
"sync-conflict-explanation",
"sync-connecting",
"sync-download-from-ankiweb",
"sync-downloading-from-ankiweb",
"sync-email-address",
"sync-log-out-button",
"sync-login",
"sync-media-aborted",
"sync-media-complete",
"sync-media-disabled",
"sync-media-failed",
"sync-media-is-syncing",
"sync-media-log-button",
"sync-media-log-title",
"sync-media-missing-file",
"sync-media-show-progress",
"sync-media-starting",
"sync-must-wait-for-end",
"sync-password",
"sync-please-enter-your-ankiweb-details",
"sync-upload-to-ankiweb",
"sync-uploading-to-ankiweb",
"sync-wrong-pass",
"undo-action-redone",
"undo-action-undone",
"undo-redo",
"undo-redo-action",
"undo-undo",
"undo-undo-action",
"urlscheme-a-note-type-must-be-provided",
"urlscheme-a-note-with-the-same-first",
"urlscheme-a-profile-must-be-provided",
"urlscheme-added-note",
"urlscheme-invalid-profile-name",
"urlscheme-the-provided-note-type-does-not",
"urlscheme-unable-to-open-profile"
]


@ -534,6 +534,15 @@ alias(
],
)
alias(
name = "walkdir",
actual = "@raze__walkdir__2_3_2//:walkdir",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "zip",
actual = "@raze__zip__0_5_13//:zip",


@ -534,6 +534,15 @@ alias(
],
)
alias(
name = "walkdir",
actual = "@raze__walkdir__2_3_2//:walkdir",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "zip",
actual = "@raze__zip__0_5_13//:zip",


@ -534,6 +534,15 @@ alias(
],
)
alias(
name = "walkdir",
actual = "@raze__walkdir__2_3_2//:walkdir",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "zip",
actual = "@raze__zip__0_5_13//:zip",


@ -0,0 +1,61 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
load("@rules_rust//rust:rust.bzl", "rust_binary", "rust_library", "rust_test")
load("@rules_rust//cargo:cargo_build_script.bzl", "cargo_build_script")
load("//rslib:rustfmt.bzl", "rustfmt_fix", "rustfmt_test")
# Library
#######################
rust_library(
name = "anki_i18n_helpers",
srcs = glob(
[
"src/**/*.rs",
],
exclude = ["src/bin/**"],
),
visibility = ["//rslib:__subpackages__"],
deps = [
"//rslib/i18n_helpers/cargo:fluent_syntax",
"//rslib/i18n_helpers/cargo:lazy_static",
"//rslib/i18n_helpers/cargo:regex",
"//rslib/i18n_helpers/cargo:serde_json",
"//rslib/i18n_helpers/cargo:walkdir",
],
)
rust_binary(
name = "write_ftl_json",
srcs = ["src/bin/write_ftl_json.rs"],
deps = [":anki_i18n_helpers"],
)
rust_binary(
name = "garbage_collect_ftl_entries",
srcs = ["src/bin/garbage_collect_ftl_entries.rs"],
deps = [":anki_i18n_helpers"],
)
# Tests
#######################
rust_test(
name = "i18n_helpers_tests",
crate = ":anki_i18n_helpers",
)
rustfmt_test(
name = "format_check",
srcs = glob([
"**/*.rs",
]),
)
rustfmt_fix(
name = "format",
srcs = glob([
"**/*.rs",
]),
)


@ -0,0 +1,20 @@
[package]
name = "anki_i18n_helpers"
version = "0.0.0"
edition = "2018"
authors = ["Ankitects Pty Ltd and contributors"]
license = "AGPL-3.0-or-later"
description = "Helpers for Anki's i18n system"
[lib]
name = "anki_i18n_helpers"
path = "src/lib.rs"
# After updating anything below, run ../cargo/update.py
[dependencies]
lazy_static = "1.4.0"
regex = "1.5.4"
serde_json = "1.0.68"
walkdir = "2"
fluent-syntax = "0.11.0"


@ -0,0 +1,553 @@
"""
@generated
cargo-raze generated Bazel file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
package(default_visibility = ["//visibility:public"])
licenses([
"notice", # See individual crates for specific licenses
])
# Aliased targets
alias(
name = "ammonia",
actual = "@raze__ammonia__3_1_2//:ammonia",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "async_trait",
actual = "@raze__async_trait__0_1_51//:async_trait",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "blake3",
actual = "@raze__blake3__1_1_0//:blake3",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "bytes",
actual = "@raze__bytes__1_1_0//:bytes",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "chrono",
actual = "@raze__chrono__0_4_19//:chrono",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "coarsetime",
actual = "@raze__coarsetime__0_1_19//:coarsetime",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "env_logger",
actual = "@raze__env_logger__0_9_0//:env_logger",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "flate2",
actual = "@raze__flate2__1_0_22//:flate2",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "fluent",
actual = "@raze__fluent__0_16_0//:fluent",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "fluent_bundle",
actual = "@raze__fluent_bundle__0_15_2//:fluent_bundle",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "fluent_syntax",
actual = "@raze__fluent_syntax__0_11_0//:fluent_syntax",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "fnv",
actual = "@raze__fnv__1_0_7//:fnv",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "futures",
actual = "@raze__futures__0_3_17//:futures",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "hex",
actual = "@raze__hex__0_4_3//:hex",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "htmlescape",
actual = "@raze__htmlescape__0_3_1//:htmlescape",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "inflections",
actual = "@raze__inflections__1_1_1//:inflections",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "intl_memoizer",
actual = "@raze__intl_memoizer__0_5_1//:intl_memoizer",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "itertools",
actual = "@raze__itertools__0_10_1//:itertools",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "lazy_static",
actual = "@raze__lazy_static__1_4_0//:lazy_static",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "linkcheck",
actual = "@raze__linkcheck__0_4_1_alpha_0//:linkcheck",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "nom",
actual = "@raze__nom__7_0_0//:nom",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "num_enum",
actual = "@raze__num_enum__0_5_4//:num_enum",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "num_format",
actual = "@raze__num_format__0_4_0//:num_format",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "num_integer",
actual = "@raze__num_integer__0_1_44//:num_integer",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "once_cell",
actual = "@raze__once_cell__1_8_0//:once_cell",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "pct_str",
actual = "@raze__pct_str__1_1_0//:pct_str",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "phf",
actual = "@raze__phf__0_10_0//:phf",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "pin_project",
actual = "@raze__pin_project__1_0_8//:pin_project",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "proc_macro_nested",
actual = "@raze__proc_macro_nested__0_1_7//:proc_macro_nested",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "prost",
actual = "@raze__prost__0_8_0//:prost",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "prost_build",
actual = "@raze__prost_build__0_8_0//:prost_build",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "pulldown_cmark",
actual = "@raze__pulldown_cmark__0_8_0//:pulldown_cmark",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "pyo3",
actual = "@raze__pyo3__0_14_5//:pyo3",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "rand",
actual = "@raze__rand__0_8_4//:rand",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "regex",
actual = "@raze__regex__1_5_4//:regex",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "reqwest",
actual = "@raze__reqwest__0_11_3//:reqwest",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "rusqlite",
actual = "@raze__rusqlite__0_25_3//:rusqlite",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "scopeguard",
actual = "@raze__scopeguard__1_1_0//:scopeguard",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "serde",
actual = "@raze__serde__1_0_130//:serde",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "serde_aux",
actual = "@raze__serde_aux__2_3_0//:serde_aux",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "serde_derive",
actual = "@raze__serde_derive__1_0_130//:serde_derive",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "serde_json",
actual = "@raze__serde_json__1_0_68//:serde_json",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "serde_repr",
actual = "@raze__serde_repr__0_1_7//:serde_repr",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "serde_tuple",
actual = "@raze__serde_tuple__0_5_0//:serde_tuple",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "sha1",
actual = "@raze__sha1__0_6_0//:sha1",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "slog",
actual = "@raze__slog__2_7_0//:slog",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "slog_async",
actual = "@raze__slog_async__2_7_0//:slog_async",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "slog_envlogger",
actual = "@raze__slog_envlogger__2_2_0//:slog_envlogger",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "slog_term",
actual = "@raze__slog_term__2_8_0//:slog_term",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_21_0//:strum",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "tempfile",
actual = "@raze__tempfile__3_2_0//:tempfile",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "tokio",
actual = "@raze__tokio__1_12_0//:tokio",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "tokio_util",
actual = "@raze__tokio_util__0_6_8//:tokio_util",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "unic_langid",
actual = "@raze__unic_langid__0_9_0//:unic_langid",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "unic_ucd_category",
actual = "@raze__unic_ucd_category__0_9_0//:unic_ucd_category",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "unicase",
actual = "@raze__unicase__2_6_0//:unicase",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "unicode_normalization",
actual = "@raze__unicode_normalization__0_1_19//:unicode_normalization",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "utime",
actual = "@raze__utime__0_3_1//:utime",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "walkdir",
actual = "@raze__walkdir__2_3_2//:walkdir",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "zip",
actual = "@raze__zip__0_5_13//:zip",
tags = [
"cargo-raze",
"manual",
],
)


@ -0,0 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
/// Delete every entry in the ftl files that is not mentioned in another message
/// or a given json.
/// First argument is the root of the ftl files, second one is the root of the
/// json files.
fn main() {
let args: Vec<String> = std::env::args().collect();
anki_i18n_helpers::garbage_collection::remove_unused_ftl_messages(&args[1], &args[2]);
}


@ -0,0 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
/// Extract references from all Rust, Python, TS, Svelte and Designer files in
/// the given roots, convert them to ftl names case and write them as a json to
/// the target file.
/// First argument is the target file name, following are source roots.
fn main() {
let args: Vec<String> = std::env::args().collect();
anki_i18n_helpers::garbage_collection::extract_ftl_references(&args[2..], &args[1]);
}


@ -0,0 +1,164 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{collections::HashSet, fs, io::BufReader, iter::FromIterator};
use fluent_syntax::{ast, parser};
use lazy_static::lazy_static;
use regex::Regex;
use serde_json;
use walkdir::{DirEntry, WalkDir};
use crate::serialize;
/// Extract references from all Rust, Python, TS, Svelte, Swift and Designer files in
/// the `roots`, convert them to kebab case and write them as a json to the
/// target file.
pub fn extract_ftl_references<S1: AsRef<str>, S2: AsRef<str>>(roots: &[S1], target: S2) {
let mut refs = HashSet::new();
for root in roots {
for_files_with_ending(root.as_ref(), "", |entry| {
extract_references_from_file(&mut refs, &entry)
})
}
let mut refs = Vec::from_iter(refs);
refs.sort();
serde_json::to_writer_pretty(
fs::File::create(target.as_ref()).expect("failed to create file"),
&refs,
)
.expect("failed to write file");
}
/// Delete every entry in `ftl_root` that is not mentioned in another message
/// or any json in `json_root`.
pub fn remove_unused_ftl_messages<S: AsRef<str>>(ftl_root: S, json_root: S) {
let mut used_ftls = HashSet::new();
import_used_messages(json_root.as_ref(), &mut used_ftls);
extract_nested_messages_and_terms(ftl_root.as_ref(), &mut used_ftls);
strip_unused_ftl_messages_and_terms(ftl_root.as_ref(), &used_ftls);
}
fn for_files_with_ending(root: &str, file_ending: &str, mut op: impl FnMut(DirEntry)) {
for res in WalkDir::new(root) {
let entry = res.expect("failed to visit dir");
if entry.file_type().is_file()
&& entry
.file_name()
.to_str()
.expect("non-unicode filename")
.ends_with(file_ending)
{
op(entry);
}
}
}
fn import_used_messages(json_root: &str, used_ftls: &mut HashSet<String>) {
for_files_with_ending(json_root, ".json", |entry| {
let buffer = BufReader::new(fs::File::open(entry.path()).expect("failed to open file"));
let refs: Vec<String> = serde_json::from_reader(buffer).expect("failed to parse json");
used_ftls.extend(refs);
})
}
fn extract_nested_messages_and_terms(ftl_root: &str, used_ftls: &mut HashSet<String>) {
lazy_static! {
static ref REFERENCE: Regex = Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap();
}
for_files_with_ending(ftl_root, ".ftl", |entry| {
let source = fs::read_to_string(entry.path()).expect("file not readable");
for caps in REFERENCE.captures_iter(&source) {
used_ftls.insert(caps[1].to_string());
}
})
}
fn strip_unused_ftl_messages_and_terms(ftl_root: &str, used_ftls: &HashSet<String>) {
for_files_with_ending(ftl_root, ".ftl", |entry| {
let ftl = fs::read_to_string(entry.path()).expect("failed to open file");
let mut ast = parser::parse(ftl.as_str()).expect("failed to parse ftl");
let num_entries = ast.body.len();
ast.body = ast
.body
.into_iter()
.filter(|entry| match entry {
ast::Entry::Message(msg) => used_ftls.contains(msg.id.name),
ast::Entry::Term(term) => used_ftls.contains(term.id.name),
_ => true,
})
.collect();
if ast.body.len() < num_entries {
fs::write(entry.path(), serialize::serialize(&ast)).expect("failed to write file");
}
});
}
fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) {
lazy_static! {
static ref SNAKECASE_TR: Regex = Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap();
static ref CAMELCASE_TR: Regex = Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap();
static ref DESIGNER_STYLE_TR: Regex = Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap();
}
let file_name = entry.file_name().to_str().expect("non-unicode filename");
let (regex, case_conversion): (&Regex, fn(&str) -> String) =
if file_name.ends_with(".rs") || file_name.ends_with(".py") {
(&SNAKECASE_TR, snake_to_kebab_case)
} else if file_name.ends_with(".ts")
|| file_name.ends_with(".svelte")
|| file_name.ends_with(".swift")
{
(&CAMELCASE_TR, camel_to_kebab_case)
} else if file_name.ends_with(".ui") {
(&DESIGNER_STYLE_TR, snake_to_kebab_case)
} else {
return;
};
let source = fs::read_to_string(entry.path()).expect("file not readable");
for caps in regex.captures_iter(&source) {
refs.insert(case_conversion(&caps[1]));
}
}
fn snake_to_kebab_case(name: &str) -> String {
name.replace('_', "-")
}
fn camel_to_kebab_case(name: &str) -> String {
let mut kebab = String::with_capacity(name.len() + 8);
for ch in name.chars() {
if ch.is_ascii_uppercase() || ch == '_' {
kebab.push('-');
}
if ch != '_' {
kebab.push(ch.to_ascii_lowercase());
}
}
kebab
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn case_conversion() {
assert_eq!(snake_to_kebab_case("foo"), "foo");
assert_eq!(snake_to_kebab_case("foo_bar"), "foo-bar");
assert_eq!(snake_to_kebab_case("foo_123"), "foo-123");
assert_eq!(snake_to_kebab_case("foo123"), "foo123");
assert_eq!(camel_to_kebab_case("foo"), "foo");
assert_eq!(camel_to_kebab_case("fooBar"), "foo-bar");
assert_eq!(camel_to_kebab_case("foo_123"), "foo-123");
assert_eq!(camel_to_kebab_case("foo123"), "foo123");
assert_eq!(camel_to_kebab_case("123foo"), "123foo");
assert_eq!(camel_to_kebab_case("123Foo"), "123-foo");
}
}


@ -0,0 +1,5 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
pub mod garbage_collection;
pub mod serialize;


@ -0,0 +1,444 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
// copied from https://github.com/projectfluent/fluent-rs/pull/241
use fluent_syntax::{ast::*, parser::Slice};
use std::fmt::{self, Error, Write};
pub fn serialize<'s, S: Slice<'s>>(resource: &Resource<S>) -> String {
serialize_with_options(resource, Options::default())
}
pub fn serialize_with_options<'s, S: Slice<'s>>(
resource: &Resource<S>,
options: Options,
) -> String {
let mut ser = Serializer::new(options);
ser.serialize_resource(resource)
.expect("Writing to an in-memory buffer never fails");
ser.into_serialized_text()
}
#[derive(Debug)]
pub struct Serializer {
writer: TextWriter,
options: Options,
state: State,
}
impl Serializer {
pub fn new(options: Options) -> Self {
Serializer {
writer: TextWriter::default(),
options,
state: State::default(),
}
}
pub fn serialize_resource<'s, S: Slice<'s>>(&mut self, res: &Resource<S>) -> Result<(), Error> {
for entry in &res.body {
match entry {
Entry::Message(msg) => self.serialize_message(msg)?,
Entry::Term(term) => self.serialize_term(term)?,
Entry::Comment(comment) => self.serialize_free_comment(comment, "#")?,
Entry::GroupComment(comment) => self.serialize_free_comment(comment, "##")?,
Entry::ResourceComment(comment) => self.serialize_free_comment(comment, "###")?,
Entry::Junk { content } if self.options.with_junk => {
self.serialize_junk(content.as_ref())?
}
Entry::Junk { .. } => continue,
}
self.state.has_entries = true;
}
Ok(())
}
pub fn into_serialized_text(self) -> String {
self.writer.buffer
}
fn serialize_junk(&mut self, junk: &str) -> Result<(), Error> {
self.writer.write_literal(junk)
}
fn serialize_free_comment<'s, S: Slice<'s>>(
&mut self,
comment: &Comment<S>,
prefix: &str,
) -> Result<(), Error> {
if self.state.has_entries {
self.writer.newline();
}
self.serialize_comment(comment, prefix)?;
self.writer.newline();
Ok(())
}
fn serialize_comment<'s, S: Slice<'s>>(
&mut self,
comment: &Comment<S>,
prefix: &str,
) -> Result<(), Error> {
for line in &comment.content {
self.writer.write_literal(prefix)?;
if !line.as_ref().trim().is_empty() {
self.writer.write_literal(" ")?;
self.writer.write_literal(line.as_ref())?;
}
self.writer.newline();
}
Ok(())
}
fn serialize_message<'s, S: Slice<'s>>(&mut self, msg: &Message<S>) -> Result<(), Error> {
if let Some(comment) = msg.comment.as_ref() {
self.serialize_comment(comment, "#")?;
}
self.writer.write_literal(msg.id.name.as_ref())?;
self.writer.write_literal(" =")?;
if let Some(value) = msg.value.as_ref() {
self.serialize_pattern(value)?;
}
self.serialize_attributes(&msg.attributes)?;
self.writer.newline();
Ok(())
}
fn serialize_term<'s, S: Slice<'s>>(&mut self, term: &Term<S>) -> Result<(), Error> {
if let Some(comment) = term.comment.as_ref() {
self.serialize_comment(comment, "#")?;
}
self.writer.write_literal("-")?;
self.writer.write_literal(term.id.name.as_ref())?;
self.writer.write_literal(" =")?;
self.serialize_pattern(&term.value)?;
self.serialize_attributes(&term.attributes)?;
self.writer.newline();
Ok(())
}
fn serialize_pattern<'s, S: Slice<'s>>(&mut self, pattern: &Pattern<S>) -> Result<(), Error> {
let start_on_newline = pattern.elements.iter().any(|elem| match elem {
PatternElement::TextElement { value } => value.as_ref().contains('\n'),
PatternElement::Placeable { expression } => is_select_expr(expression),
});
if start_on_newline {
self.writer.newline();
self.writer.indent();
} else {
self.writer.write_literal(" ")?;
}
for element in &pattern.elements {
self.serialize_element(element)?;
}
if start_on_newline {
self.writer.dedent();
}
Ok(())
}
fn serialize_attributes<'s, S: Slice<'s>>(
&mut self,
attrs: &[Attribute<S>],
) -> Result<(), Error> {
if attrs.is_empty() {
return Ok(());
}
self.writer.indent();
for attr in attrs {
self.writer.newline();
self.serialize_attribute(attr)?;
}
self.writer.dedent();
Ok(())
}
fn serialize_attribute<'s, S: Slice<'s>>(&mut self, attr: &Attribute<S>) -> Result<(), Error> {
self.writer.write_literal(".")?;
self.writer.write_literal(attr.id.name.as_ref())?;
self.writer.write_literal(" =")?;
self.serialize_pattern(&attr.value)?;
Ok(())
}
fn serialize_element<'s, S: Slice<'s>>(
&mut self,
elem: &PatternElement<S>,
) -> Result<(), Error> {
match elem {
PatternElement::TextElement { value } => self.writer.write_literal(value.as_ref()),
PatternElement::Placeable { expression } => match expression {
Expression::Inline(InlineExpression::Placeable { expression }) => {
// A placeable inside a placeable is a special case because we
// don't want the braces to look silly (e.g. "{ { Foo() } }").
self.writer.write_literal("{{ ")?;
self.serialize_expression(expression)?;
self.writer.write_literal(" }}")?;
Ok(())
}
Expression::Select { .. } => {
// select adds its own newline and indent, emit the brace
// *without* a space so we don't get 5 spaces instead of 4
self.writer.write_literal("{ ")?;
self.serialize_expression(expression)?;
self.writer.write_literal("}")?;
Ok(())
}
Expression::Inline(_) => {
self.writer.write_literal("{ ")?;
self.serialize_expression(expression)?;
self.writer.write_literal(" }")?;
Ok(())
}
},
}
}
fn serialize_expression<'s, S: Slice<'s>>(
&mut self,
expr: &Expression<S>,
) -> Result<(), Error> {
match expr {
Expression::Inline(inline) => self.serialize_inline_expression(inline),
Expression::Select { selector, variants } => {
self.serialize_select_expression(selector, variants)
}
}
}
fn serialize_inline_expression<'s, S: Slice<'s>>(
&mut self,
expr: &InlineExpression<S>,
) -> Result<(), Error> {
match expr {
InlineExpression::StringLiteral { value } => {
self.writer.write_literal("\"")?;
self.writer.write_literal(value.as_ref())?;
self.writer.write_literal("\"")?;
Ok(())
}
InlineExpression::NumberLiteral { value } => self.writer.write_literal(value.as_ref()),
InlineExpression::VariableReference {
id: Identifier { name: value },
} => {
self.writer.write_literal("$")?;
self.writer.write_literal(value.as_ref())?;
Ok(())
}
InlineExpression::FunctionReference { id, arguments } => {
self.writer.write_literal(id.name.as_ref())?;
self.serialize_call_arguments(arguments)?;
Ok(())
}
InlineExpression::MessageReference { id, attribute } => {
self.writer.write_literal(id.name.as_ref())?;
if let Some(attr) = attribute.as_ref() {
self.writer.write_literal(".")?;
self.writer.write_literal(attr.name.as_ref())?;
}
Ok(())
}
InlineExpression::TermReference {
id,
attribute,
arguments,
} => {
self.writer.write_literal("-")?;
self.writer.write_literal(id.name.as_ref())?;
if let Some(attr) = attribute.as_ref() {
self.writer.write_literal(".")?;
self.writer.write_literal(attr.name.as_ref())?;
}
if let Some(args) = arguments.as_ref() {
self.serialize_call_arguments(args)?;
}
Ok(())
}
InlineExpression::Placeable { expression } => {
self.writer.write_literal("{")?;
self.serialize_expression(expression)?;
self.writer.write_literal("}")?;
Ok(())
}
}
}
fn serialize_select_expression<'s, S: Slice<'s>>(
&mut self,
selector: &InlineExpression<S>,
variants: &[Variant<S>],
) -> Result<(), Error> {
self.serialize_inline_expression(selector)?;
self.writer.write_literal(" ->")?;
self.writer.newline();
self.writer.indent();
for variant in variants {
self.serialize_variant(variant)?;
self.writer.newline();
}
self.writer.dedent();
Ok(())
}
fn serialize_variant<'s, S: Slice<'s>>(&mut self, variant: &Variant<S>) -> Result<(), Error> {
if variant.default {
self.writer.write_char_into_indent('*');
}
self.writer.write_literal("[")?;
self.serialize_variant_key(&variant.key)?;
self.writer.write_literal("]")?;
self.serialize_pattern(&variant.value)?;
Ok(())
}
fn serialize_variant_key<'s, S: Slice<'s>>(
&mut self,
key: &VariantKey<S>,
) -> Result<(), Error> {
match key {
VariantKey::NumberLiteral { value } | VariantKey::Identifier { name: value } => {
self.writer.write_literal(value.as_ref())
}
}
}
fn serialize_call_arguments<'s, S: Slice<'s>>(
&mut self,
args: &CallArguments<S>,
) -> Result<(), Error> {
let mut argument_written = false;
self.writer.write_literal("(")?;
for positional in &args.positional {
if argument_written {
self.writer.write_literal(", ")?;
}
self.serialize_inline_expression(positional)?;
argument_written = true;
}
for named in &args.named {
if argument_written {
self.writer.write_literal(", ")?;
}
self.writer.write_literal(named.name.name.as_ref())?;
self.writer.write_literal(": ")?;
self.serialize_inline_expression(&named.value)?;
argument_written = true;
}
self.writer.write_literal(")")?;
Ok(())
}
}
fn is_select_expr<'s, S: Slice<'s>>(expr: &Expression<S>) -> bool {
match expr {
Expression::Select { .. } => true,
Expression::Inline(InlineExpression::Placeable { expression }) => {
is_select_expr(&*expression)
}
Expression::Inline(_) => false,
}
}
#[derive(Debug, Default, Copy, Clone, PartialEq)]
pub struct Options {
pub with_junk: bool,
}
#[derive(Debug, Default, PartialEq)]
struct State {
has_entries: bool,
}
#[derive(Debug, Clone, Default)]
struct TextWriter {
buffer: String,
indent_level: usize,
}
impl TextWriter {
fn indent(&mut self) {
self.indent_level += 1;
}
fn dedent(&mut self) {
self.indent_level = self
.indent_level
.checked_sub(1)
.expect("Dedenting without a corresponding indent");
}
fn write_indent(&mut self) {
for _ in 0..self.indent_level {
self.buffer.push_str(" ");
}
}
fn newline(&mut self) {
self.buffer.push('\n');
}
fn write_literal(&mut self, mut item: &str) -> fmt::Result {
if self.buffer.ends_with('\n') {
// we've just added a newline, make sure it's properly indented
self.write_indent();
// we've just added indentation, so we don't care about leading
// spaces
item = item.trim_start_matches(' ');
}
write!(self.buffer, "{}", item)
}
fn write_char_into_indent(&mut self, ch: char) {
if self.buffer.ends_with('\n') {
self.write_indent();
}
self.buffer.pop();
self.buffer.push(ch);
}
}


@ -122,57 +122,57 @@ impl Note {
}
impl Column {
pub fn cards_mode_label(self, i18n: &I18n) -> String {
pub fn cards_mode_label(self, tr: &I18n) -> String {
match self {
Self::Answer => i18n.browsing_answer(),
Self::CardMod => i18n.search_card_modified(),
Self::Cards => i18n.browsing_card(),
Self::Deck => i18n.decks_deck(),
Self::Due => i18n.statistics_due_date(),
Self::Custom => i18n.browsing_addon(),
Self::Ease => i18n.browsing_ease(),
Self::Interval => i18n.browsing_interval(),
Self::Lapses => i18n.scheduling_lapses(),
Self::NoteCreation => i18n.browsing_created(),
Self::NoteMod => i18n.search_note_modified(),
Self::Notetype => i18n.browsing_note(),
Self::Question => i18n.browsing_question(),
Self::Reps => i18n.scheduling_reviews(),
Self::SortField => i18n.browsing_sort_field(),
Self::Tags => i18n.editing_tags(),
Self::Answer => tr.browsing_answer(),
Self::CardMod => tr.search_card_modified(),
Self::Cards => tr.browsing_card(),
Self::Deck => tr.decks_deck(),
Self::Due => tr.statistics_due_date(),
Self::Custom => tr.browsing_addon(),
Self::Ease => tr.browsing_ease(),
Self::Interval => tr.browsing_interval(),
Self::Lapses => tr.scheduling_lapses(),
Self::NoteCreation => tr.browsing_created(),
Self::NoteMod => tr.search_note_modified(),
Self::Notetype => tr.browsing_note(),
Self::Question => tr.browsing_question(),
Self::Reps => tr.scheduling_reviews(),
Self::SortField => tr.browsing_sort_field(),
Self::Tags => tr.editing_tags(),
}
.into()
}
pub fn notes_mode_label(self, i18n: &I18n) -> String {
pub fn notes_mode_label(self, tr: &I18n) -> String {
match self {
Self::CardMod => i18n.search_card_modified(),
Self::Cards => i18n.editing_cards(),
Self::Ease => i18n.browsing_average_ease(),
Self::Interval => i18n.browsing_average_interval(),
Self::Reps => i18n.scheduling_reviews(),
_ => return self.cards_mode_label(i18n),
Self::CardMod => tr.search_card_modified(),
Self::Cards => tr.editing_cards(),
Self::Ease => tr.browsing_average_ease(),
Self::Interval => tr.browsing_average_interval(),
Self::Reps => tr.scheduling_reviews(),
_ => return self.cards_mode_label(tr),
}
.into()
}
pub fn cards_mode_tooltip(self, i18n: &I18n) -> String {
pub fn cards_mode_tooltip(self, tr: &I18n) -> String {
match self {
Self::Answer => i18n.browsing_tooltip_answer(),
Self::CardMod => i18n.browsing_tooltip_card_modified(),
Self::Cards => i18n.browsing_tooltip_card(),
Self::NoteMod => i18n.browsing_tooltip_note_modified(),
Self::Notetype => i18n.browsing_tooltip_notetype(),
Self::Question => i18n.browsing_tooltip_question(),
Self::Answer => tr.browsing_tooltip_answer(),
Self::CardMod => tr.browsing_tooltip_card_modified(),
Self::Cards => tr.browsing_tooltip_card(),
Self::NoteMod => tr.browsing_tooltip_note_modified(),
Self::Notetype => tr.browsing_tooltip_notetype(),
Self::Question => tr.browsing_tooltip_question(),
_ => "".into(),
}
.into()
}
pub fn notes_mode_tooltip(self, i18n: &I18n) -> String {
pub fn notes_mode_tooltip(self, tr: &I18n) -> String {
match self {
Self::Cards => i18n.browsing_tooltip_cards(),
_ => return self.cards_mode_label(i18n),
Self::Cards => tr.browsing_tooltip_cards(),
_ => return self.cards_mode_label(tr),
}
.into()
}


@ -93,42 +93,42 @@ where
pub fn summarize_output(&self, output: &mut MediaCheckOutput) -> String {
let mut buf = String::new();
let i = &self.ctx.tr;
let tr = &self.ctx.tr;
// top summary area
if output.trash_count > 0 {
let megs = (output.trash_bytes as f32) / 1024.0 / 1024.0;
buf += &i.media_check_trash_count(output.trash_count, megs);
buf += &tr.media_check_trash_count(output.trash_count, megs);
buf.push('\n');
}
buf += &i.media_check_missing_count(output.missing.len());
buf += &tr.media_check_missing_count(output.missing.len());
buf.push('\n');
buf += &i.media_check_unused_count(output.unused.len());
buf += &tr.media_check_unused_count(output.unused.len());
buf.push('\n');
if !output.renamed.is_empty() {
buf += &i.media_check_renamed_count(output.renamed.len());
buf += &tr.media_check_renamed_count(output.renamed.len());
buf.push('\n');
}
if !output.oversize.is_empty() {
buf += &i.media_check_oversize_count(output.oversize.len());
buf += &tr.media_check_oversize_count(output.oversize.len());
buf.push('\n');
}
if !output.dirs.is_empty() {
buf += &i.media_check_subfolder_count(output.dirs.len());
buf += &tr.media_check_subfolder_count(output.dirs.len());
buf.push('\n');
}
buf.push('\n');
if !output.renamed.is_empty() {
buf += &i.media_check_renamed_header();
buf += &tr.media_check_renamed_header();
buf.push('\n');
for (old, new) in &output.renamed {
buf += &without_unicode_isolation(
&i.media_check_renamed_file(old.as_str(), new.as_str()),
&tr.media_check_renamed_file(old.as_str(), new.as_str()),
);
buf.push('\n');
}
@ -137,10 +137,10 @@ where
if !output.oversize.is_empty() {
output.oversize.sort();
buf += &i.media_check_oversize_header();
buf += &tr.media_check_oversize_header();
buf.push('\n');
for fname in &output.oversize {
buf += &without_unicode_isolation(&i.media_check_oversize_file(fname.as_str()));
buf += &without_unicode_isolation(&tr.media_check_oversize_file(fname.as_str()));
buf.push('\n');
}
buf.push('\n')
@ -148,10 +148,10 @@ where
if !output.dirs.is_empty() {
output.dirs.sort();
buf += &i.media_check_subfolder_header();
buf += &tr.media_check_subfolder_header();
buf.push('\n');
for fname in &output.dirs {
buf += &without_unicode_isolation(&i.media_check_subfolder_file(fname.as_str()));
buf += &without_unicode_isolation(&tr.media_check_subfolder_file(fname.as_str()));
buf.push('\n');
}
buf.push('\n')
@ -159,10 +159,10 @@ where
if !output.missing.is_empty() {
output.missing.sort();
buf += &i.media_check_missing_header();
buf += &tr.media_check_missing_header();
buf.push('\n');
for fname in &output.missing {
buf += &without_unicode_isolation(&i.media_check_missing_file(fname.as_str()));
buf += &without_unicode_isolation(&tr.media_check_missing_file(fname.as_str()));
buf.push('\n');
}
buf.push('\n')
@ -170,10 +170,10 @@ where
if !output.unused.is_empty() {
output.unused.sort();
buf += &i.media_check_unused_header();
buf += &tr.media_check_unused_header();
buf.push('\n');
for fname in &output.unused {
buf += &without_unicode_isolation(&i.media_check_unused_file(fname.as_str()));
buf += &without_unicode_isolation(&tr.media_check_unused_file(fname.as_str()));
buf.push('\n');
}
}


@ -9,7 +9,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import { createEventDispatcher } from "svelte";
import type { Stats } from "../lib/proto";
import type { PreferenceStore } from "../sveltelib/preferences";
import * as translate from "../lib/ftl";
import * as tr2 from "../lib/ftl";
import { defaultGraphBounds } from "./graph-helpers";
import type { SearchEventMap } from "./graph-helpers";
import { gatherData, renderCards } from "./card-counts";
@ -35,8 +35,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
tableData = renderCards(svg as any, bounds, graphData);
}
const label = translate.statisticsCountsSeparateSuspendedBuriedCards();
const total = translate.statisticsCountsTotalCards();
const label = tr2.statisticsCountsSeparateSuspendedBuriedCards();
const total = tr2.statisticsCountsTotalCards();
</script>
<Graph title={graphData.title}>