Mirror of https://github.com/ankitects/anki.git, synced 2025-12-20 10:22:57 -05:00.
rework translation handling

Instead of generating a fluent.proto file with a giant enum, create a
.json file representing the translations that downstream consumers can
use for code generation. This enables the generation of a separate
method for each translation, with a docstring that shows the actual
text, and any required arguments listed in the function signature.

The codebase is still using the old enum for now; updating it will need
to come in future commits, and the old enum will need to be kept
around, as add-ons are referencing it.

Other changes:

- move translation code into a separate crate
- store the translations on a per-file/module basis, which will allow
  us to avoid sending 1000+ strings on each JS page load in the future
- drop the undocumented support for external .ftl files, which we
  weren't using
- duplicate strings in translation files are now checked for at build time
- fix i18n test failing when run outside Bazel
- drop slog dependency in i18n module
parent 4b5944f181
commit 9aece2a7b8
55 changed files with 1673 additions and 1013 deletions
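To illustrate the intended change for callers, here is a sketch of how the two interfaces compare, using the statistics-reviews string exercised in the tests below. The generated method name is derived from the .ftl key, so treat the exact names as illustrative:

    # old interface: one generic call keyed by a giant enum
    col.tr(TR.STATISTICS_REVIEWS, reviews=2)

    # new generated interface (sketch): one method per translation, with
    # its arguments in the signature and the English text in the docstring
    col.tr.statistics_reviews(reviews=2)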

Cargo.lock | 39 (generated)

@@ -50,6 +50,7 @@ name = "anki"
 version = "0.0.0"
 dependencies = [
  "ammonia",
+ "anki_i18n",
  "askama",
  "async-compression",
  "async-trait",
@@ -70,7 +71,6 @@ dependencies = [
  "itertools",
  "lazy_static",
  "nom",
- "num-format",
  "num-integer",
  "num_enum",
  "once_cell",
@@ -105,6 +105,21 @@ dependencies = [
  "zip",
 ]
 
+[[package]]
+name = "anki_i18n"
+version = "0.0.0"
+dependencies = [
+ "fluent",
+ "fluent-syntax",
+ "inflections",
+ "intl-memoizer",
+ "num-format",
+ "phf",
+ "serde",
+ "serde_json",
+ "unic-langid",
+]
+
 [[package]]
 name = "anki_workspace"
 version = "0.0.0"
@@ -1063,6 +1078,12 @@ dependencies = [
  "unindent",
 ]
 
+[[package]]
+name = "inflections"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a257582fdcde896fd96463bf2d40eefea0580021c0712a0e2b028b60b47a837a"
+
 [[package]]
 name = "instant"
 version = "0.1.9"
@@ -1559,7 +1580,9 @@ version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12"
 dependencies = [
+ "phf_macros",
  "phf_shared",
+ "proc-macro-hack",
 ]
 
 [[package]]
@@ -1582,6 +1605,20 @@ dependencies = [
  "rand 0.7.3",
 ]
 
+[[package]]
+name = "phf_macros"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c"
+dependencies = [
+ "phf_generator",
+ "phf_shared",
+ "proc-macro-hack",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "phf_shared"
 version = "0.8.0"

@@ -5,7 +5,7 @@ authors = ["Ankitects Pty Ltd and contributors"]
 license = "AGPL-3.0-or-later"
 
 [workspace]
-members = ["rslib", "pylib/rsbridge"]
+members = ["rslib", "rslib/i18n", "pylib/rsbridge"]
 
 [lib]
 # dummy top level for tooling

@@ -1071,6 +1071,16 @@ def raze_fetch_remote_crates():
         build_file = Label("//cargo/remote:BUILD.indoc-impl-0.3.6.bazel"),
     )
 
+    maybe(
+        http_archive,
+        name = "raze__inflections__1_1_1",
+        url = "https://crates.io/api/v1/crates/inflections/1.1.1/download",
+        type = "tar.gz",
+        sha256 = "a257582fdcde896fd96463bf2d40eefea0580021c0712a0e2b028b60b47a837a",
+        strip_prefix = "inflections-1.1.1",
+        build_file = Label("//cargo/remote:BUILD.inflections-1.1.1.bazel"),
+    )
+
     maybe(
         http_archive,
         name = "raze__instant__0_1_9",
@@ -1611,6 +1621,16 @@ def raze_fetch_remote_crates():
         build_file = Label("//cargo/remote:BUILD.phf_generator-0.8.0.bazel"),
     )
 
+    maybe(
+        http_archive,
+        name = "raze__phf_macros__0_8_0",
+        url = "https://crates.io/api/v1/crates/phf_macros/0.8.0/download",
+        type = "tar.gz",
+        sha256 = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c",
+        strip_prefix = "phf_macros-0.8.0",
+        build_file = Label("//cargo/remote:BUILD.phf_macros-0.8.0.bazel"),
+    )
+
     maybe(
         http_archive,
         name = "raze__phf_shared__0_8_0",

@@ -53,6 +53,15 @@
         "license_file": null,
         "description": "Anki's Rust library code"
     },
+    {
+        "name": "anki_i18n",
+        "version": "0.0.0",
+        "authors": "Ankitects Pty Ltd and contributors",
+        "repository": null,
+        "license": "AGPL-3.0-or-later",
+        "license_file": null,
+        "description": "Anki's Rust library i18n code"
+    },
     {
         "name": "anki_workspace",
         "version": "0.0.0",
@@ -971,6 +980,15 @@
         "license_file": null,
         "description": "Indented document literals"
     },
+    {
+        "name": "inflections",
+        "version": "1.1.1",
+        "authors": "Caleb Meredith <calebmeredith8@gmail.com>",
+        "repository": "https://docs.rs/inflections",
+        "license": "MIT",
+        "license_file": null,
+        "description": "High performance inflection transformation library for changing properties of words like the case."
+    },
     {
         "name": "instant",
         "version": "0.1.9",
@@ -1457,6 +1475,15 @@
         "license_file": null,
         "description": "PHF generation logic"
     },
+    {
+        "name": "phf_macros",
+        "version": "0.8.0",
+        "authors": "Steven Fackler <sfackler@gmail.com>",
+        "repository": "https://github.com/sfackler/rust-phf",
+        "license": "MIT",
+        "license_file": null,
+        "description": "Macros to generate types in the phf crate"
+    },
     {
         "name": "phf_shared",
         "version": "0.8.0",

cargo/remote/BUILD.inflections-1.1.1.bazel | 53 (vendored, new file)

@@ -0,0 +1,53 @@
+"""
+@generated
+cargo-raze crate build file.
+
+DO NOT EDIT! Replaced on runs of cargo-raze
+"""
+
+# buildifier: disable=load
+load(
+    "@io_bazel_rules_rust//rust:rust.bzl",
+    "rust_binary",
+    "rust_library",
+    "rust_test",
+)
+
+# buildifier: disable=load
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+package(default_visibility = [
+    # Public for visibility by "@raze__crate__version//" targets.
+    #
+    # Prefer access through "//cargo", which limits external
+    # visibility to explicit Cargo.toml dependencies.
+    "//visibility:public",
+])
+
+licenses([
+    "notice",  # MIT from expression "MIT"
+])
+
+# Generated Targets
+
+rust_library(
+    name = "inflections",
+    srcs = glob(["**/*.rs"]),
+    crate_features = [
+    ],
+    crate_root = "src/lib.rs",
+    crate_type = "lib",
+    data = [],
+    edition = "2015",
+    rustc_flags = [
+        "--cap-lints=allow",
+    ],
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+    version = "1.1.1",
+    # buildifier: leave-alone
+    deps = [
+    ],
+)

cargo/remote/BUILD.phf-0.8.0.bazel | 7 (vendored)

@@ -35,12 +35,19 @@ rust_library(
     srcs = glob(["**/*.rs"]),
     crate_features = [
         "default",
+        "macros",
+        "phf_macros",
+        "proc-macro-hack",
         "std",
     ],
     crate_root = "src/lib.rs",
     crate_type = "lib",
     data = [],
     edition = "2018",
+    proc_macro_deps = [
+        "@raze__phf_macros__0_8_0//:phf_macros",
+        "@raze__proc_macro_hack__0_5_19//:proc_macro_hack",
+    ],
     rustc_flags = [
         "--cap-lints=allow",
     ],

cargo/remote/BUILD.phf_macros-0.8.0.bazel | 67 (vendored, new file)

@@ -0,0 +1,67 @@
+"""
+@generated
+cargo-raze crate build file.
+
+DO NOT EDIT! Replaced on runs of cargo-raze
+"""
+
+# buildifier: disable=load
+load(
+    "@io_bazel_rules_rust//rust:rust.bzl",
+    "rust_binary",
+    "rust_library",
+    "rust_test",
+)
+
+# buildifier: disable=load
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+package(default_visibility = [
+    # Public for visibility by "@raze__crate__version//" targets.
+    #
+    # Prefer access through "//cargo", which limits external
+    # visibility to explicit Cargo.toml dependencies.
+    "//visibility:public",
+])
+
+licenses([
+    "notice",  # MIT from expression "MIT"
+])
+
+# Generated Targets
+
+# Unsupported target "bench" with type "bench" omitted
+
+rust_library(
+    name = "phf_macros",
+    srcs = glob(["**/*.rs"]),
+    crate_features = [
+    ],
+    crate_root = "src/lib.rs",
+    crate_type = "proc-macro",
+    data = [],
+    edition = "2018",
+    proc_macro_deps = [
+        "@raze__proc_macro_hack__0_5_19//:proc_macro_hack",
+    ],
+    rustc_flags = [
+        "--cap-lints=allow",
+    ],
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+    version = "0.8.0",
+    # buildifier: leave-alone
+    deps = [
+        "@raze__phf_generator__0_8_0//:phf_generator",
+        "@raze__phf_shared__0_8_0//:phf_shared",
+        "@raze__proc_macro2__1_0_24//:proc_macro2",
+        "@raze__quote__1_0_9//:quote",
+        "@raze__syn__1_0_63//:syn",
+    ],
+)
+
+# Unsupported target "compiletest" with type "test" omitted
+
+# Unsupported target "test" with type "test" omitted

@@ -12,14 +12,6 @@ py_proto_library_typed(
     ],
 )
 
-py_proto_library_typed(
-    name = "fluent_pb2",
-    src = "//rslib:fluent.proto",
-    visibility = [
-        "//visibility:public",
-    ],
-)
-
 py_binary(
     name = "genbackend",
     srcs = [
@@ -40,6 +32,27 @@ genrule(
     tools = ["genbackend"],
 )
 
+py_binary(
+    name = "genfluent",
+    srcs = [
+        "genfluent.py",
+    ],
+    deps = [
+        requirement("black"),
+        requirement("stringcase"),
+    ],
+)
+
+genrule(
+    name = "fluent_gen",
+    outs = ["fluent.py"],
+    cmd = "$(location genfluent) $(location //rslib/i18n:strings.json) $@",
+    tools = [
+        "genfluent",
+        "//rslib/i18n:strings.json",
+    ],
+)
+
 copy_file(
     name = "rsbridge_unix",
     src = "//pylib/rsbridge",
@@ -82,7 +95,7 @@ filegroup(
         "__init__.py",
         "rsbridge.pyi",
         ":backend_pb2",
-        ":fluent_pb2",
+        ":fluent_gen",
        ":rsbackend_gen",
         ":rsbridge",
     ],
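
The fluent_gen genrule above wires //rslib/i18n:strings.json into genfluent.py to produce fluent.py. Outside Bazel, the equivalent step would be roughly the following (illustrative paths, not a documented entry point):

    # run the generator by hand; genfluent.py takes the strings.json path
    # and the output path as its two positional arguments
    import subprocess
    subprocess.run(
        ["python", "pylib/anki/_backend/genfluent.py", "strings.json", "fluent.py"],
        check=True,
    )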

@@ -3,19 +3,19 @@
 
 from __future__ import annotations
 
-import os
 from typing import Any, Dict, List, Optional, Sequence, Union
+from weakref import ref
 
 import anki.buildinfo
 from anki._backend.generated import RustBackendGenerated
 from anki.dbproxy import Row as DBRow
 from anki.dbproxy import ValueForDB
 from anki.errors import backend_exception_to_pylib
-from anki.lang import TR, FormatTimeSpan
 from anki.utils import from_json_bytes, to_json_bytes
 
 from . import backend_pb2 as pb
 from . import rsbridge
+from .fluent import GeneratedTranslations, LegacyTranslationEnum
 
 # pylint: disable=c-extension-no-member
 assert rsbridge.buildhash() == anki.buildinfo.buildhash
@@ -37,18 +37,14 @@ class RustBackend(RustBackendGenerated):
 
     def __init__(
         self,
-        ftl_folder: Optional[str] = None,
         langs: Optional[List[str]] = None,
         server: bool = False,
     ) -> None:
         # pick up global defaults if not provided
-        if ftl_folder is None:
-            ftl_folder = os.path.join(anki.lang.locale_folder, "fluent")
         if langs is None:
             langs = [anki.lang.currentLang]
 
         init_msg = pb.BackendInit(
-            locale_folder_path=ftl_folder,
             preferred_langs=langs,
             server=server,
         )
@@ -82,13 +78,16 @@
         err.ParseFromString(err_bytes)
         raise backend_exception_to_pylib(err)
 
-    def translate(self, key: TR.V, **kwargs: Union[str, int, float]) -> str:
-        return self.translate_string(translate_string_in(key, **kwargs))
+    def translate(
+        self, key: Union[LegacyTranslationEnum, int], **kwargs: Union[str, int, float]
+    ) -> str:
+        int_key = key if isinstance(key, int) else key.value
+        return self.translate_string(translate_string_in(key=int_key, **kwargs))
 
     def format_time_span(
         self,
-        seconds: float,
-        context: FormatTimeSpan.Context.V = FormatTimeSpan.INTERVALS,
+        seconds: Any,
+        context: Any = 2,
     ) -> str:
         print(
             "please use col.format_timespan() instead of col.backend.format_time_span()"
@@ -107,7 +106,7 @@
 
 
 def translate_string_in(
-    key: TR.V, **kwargs: Union[str, int, float]
+    key: int, **kwargs: Union[str, int, float]
 ) -> pb.TranslateStringIn:
     args = {}
     for (k, v) in kwargs.items():
@@ -116,3 +115,17 @@ def translate_string_in(
         else:
             args[k] = pb.TranslateArgValue(number=v)
     return pb.TranslateStringIn(key=key, args=args)
+
+
+class Translations(GeneratedTranslations):
+    def __init__(self, backend: ref["anki._backend.RustBackend"]):
+        self._backend = backend
+
+    def __call__(self, *args: Any, **kwargs: Any) -> str:
+        "Mimic the old col.tr(TR....) interface"
+        return self._backend().translate(*args, **kwargs)
+
+    def _translate(
+        self, module: int, translation: int, args: Dict[str, Union[str, int, float]]
+    ) -> str:
+        return self._backend().translate(module * 1000 + translation, **args)
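
Translations._translate above packs the module and per-module translation indices into the single integer keys used by the legacy enum. A quick illustration, with hypothetical index values:

    # module index 15, translation index 42 pack into one legacy key
    legacy_key = 15 * 1000 + 42   # == 15042
    # the receiving side can split it again if needed
    module, translation = divmod(legacy_key, 1000)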

pylib/anki/_backend/fluent.py | 1 (new symbolic link)

@@ -0,0 +1 @@
+../../../bazel-bin/pylib/anki/_backend/fluent.py

pylib/anki/_backend/fluent_pb2.pyi | 1 (deleted symbolic link)

@@ -1 +0,0 @@
-../../../bazel-bin/pylib/anki/_backend/fluent_pb2.pyi

pylib/anki/_backend/genfluent.py | 82 (new file)

@@ -0,0 +1,82 @@
+# Copyright: Ankitects Pty Ltd and contributors
+# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+import json
+import sys
+from typing import List
+
+import stringcase
+
+strings_json, outfile = sys.argv[1:]
+modules = json.load(open(strings_json))
+
+
+def legacy_enum() -> str:
+    out = ["class LegacyTranslationEnum(enum.Enum):"]
+    for module in modules:
+        for translation in module["translations"]:
+            key = stringcase.constcase(translation["key"])
+            value = module["index"] * 1000 + translation["index"]
+            out.append(f"    {key} = {value}")
+
+    return "\n".join(out) + "\n"
+
+
+def methods() -> str:
+    out = [
+        "class GeneratedTranslations:",
+        "    def _translate(self, module: int, translation: int, args: Dict) -> str:",
+        "        raise Exception('not implemented')",
+    ]
+    for module in modules:
+        for translation in module["translations"]:
+            key = translation["key"].replace("-", "_")
+            arg_types = get_arg_types(translation["variables"])
+            args = get_args(translation["variables"])
+            doc = translation["text"]
+            out.append(
+                f"""
+    def {key}(self, {arg_types}) -> str:
+        r''' {doc} '''
+        return self._translate({module["index"]}, {translation["index"]}, {{{args}}})
+"""
+            )
+
+    return "\n".join(out) + "\n"
+
+
+def get_arg_types(args: List[str]) -> str:
+    return ", ".join([f"{stringcase.snakecase(arg)}: FluentVariable" for arg in args])
+
+
+def get_args(args: List[str]) -> str:
+    return ", ".join([f'"{arg}": {stringcase.snakecase(arg)}' for arg in args])
+
+
+out = ""
+out += legacy_enum()
+out += methods()
+
+open(outfile, "wb").write(
+    (
+        '''# Copyright: Ankitects Pty Ltd and contributors
+# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+# pylint: skip-file
+
+from __future__ import annotations
+
+"""
+This file is automatically generated from the *.ftl files.
+"""
+
+import enum
+from typing import Dict, Union
+
+FluentVariable = Union[str, int, float]
+
+'''
+        + out
+    ).encode("utf8")
+)
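
To make the generator above concrete: for a hypothetical entry statistics-reviews = {$reviews} reviews in a module with index 15, at entry index 0, it would emit roughly the following:

    class LegacyTranslationEnum(enum.Enum):
        STATISTICS_REVIEWS = 15000

    class GeneratedTranslations:
        def statistics_reviews(self, reviews: FluentVariable) -> str:
            r''' $reviews reviews '''
            return self._translate(15, 0, {"reviews": reviews})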

@@ -33,7 +33,7 @@ from dataclasses import dataclass, field
 
 import anki.latex
 from anki import hooks
-from anki._backend import RustBackend
+from anki._backend import RustBackend, Translations
 from anki.cards import Card
 from anki.config import Config, ConfigManager
 from anki.consts import *
@@ -101,6 +101,7 @@ class Collection:
         self.path = os.path.abspath(path)
         self.reopen()
 
+        self.tr = Translations(weakref.ref(self._backend))
         self.media = MediaManager(self, server)
         self.models = ModelManager(self)
         self.decks = DeckManager(self)
@@ -127,9 +128,6 @@
     # I18n/messages
     ##########################################################################
 
-    def tr(self, key: TR.V, **kwargs: Union[str, int, float]) -> str:
-        return self._backend.translate(key, **kwargs)
-
     def format_timespan(
         self,
         seconds: float,

@@ -9,12 +9,12 @@ from typing import Any, Optional, Tuple
 
 import anki
 import anki._backend.backend_pb2 as _pb
-import anki._backend.fluent_pb2 as _fluent_pb
 
 # public exports
-TR = _fluent_pb.FluentString
+TR = anki._backend.LegacyTranslationEnum
 FormatTimeSpan = _pb.FormatTimespanIn
 
+
 langs = sorted(
     [
         ("Afrikaans", "af_ZA"),
@@ -150,9 +150,6 @@ currentLang = "en"
 # the current Fluent translation instance
 current_i18n: Optional[anki._backend.RustBackend] = None
 
-# path to locale folder
-locale_folder = ""
-
 
 def _(str: str) -> str:
     print(f"gettext _() is deprecated: {str}")
@@ -172,11 +169,10 @@ def tr_legacyglobal(*args: Any, **kwargs: Any) -> str:
     return "tr_legacyglobal() called without active backend"
 
 
-def set_lang(lang: str, locale_dir: str) -> None:
-    global currentLang, current_i18n, locale_folder
+def set_lang(lang: str) -> None:
+    global currentLang, current_i18n
     currentLang = lang
-    current_i18n = anki._backend.RustBackend(ftl_folder=locale_folder, langs=[lang])
-    locale_folder = locale_dir
+    current_i18n = anki._backend.RustBackend(langs=[lang])
 
 
 def get_def_lang(lang: Optional[str] = None) -> Tuple[int, str]:

@@ -209,10 +209,9 @@ def setupLangAndBackend(
     lang = force or pm.meta["defaultLang"]
     lang = anki.lang.lang_to_disk_lang(lang)
 
-    ldir = locale_dir()
     if not firstTime:
         # set active language
-        anki.lang.set_lang(lang, ldir)
+        anki.lang.set_lang(lang)
 
     # switch direction for RTL languages
     if anki.lang.is_rtl(lang):
@@ -465,7 +464,7 @@ def _run(argv: Optional[List[str]] = None, exec: bool = True) -> Optional[AnkiApp]:
 
     # default to specified/system language before getting user's preference so that we can localize some more strings
     lang = anki.lang.get_def_lang(opts.lang)
-    anki.lang.set_lang(lang[1], locale_dir())
+    anki.lang.set_lang(lang[1])
 
     # profile manager
     pm = None

@@ -24,7 +24,7 @@ from anki.sync import SyncAuth
 from anki.utils import intTime, isMac, isWin
 from aqt import appHelpSite
 from aqt.qt import *
-from aqt.utils import TR, disable_help_button, locale_dir, showWarning, tr
+from aqt.utils import TR, disable_help_button, showWarning, tr
 
 # Profile handling
 ##########################################################################
@@ -563,7 +563,7 @@ create table if not exists profiles
         sql = "update profiles set data = ? where name = ?"
         self.db.execute(sql, self._pickle(self.meta), "_global")
         self.db.commit()
-        anki.lang.set_lang(code, locale_dir())
+        anki.lang.set_lang(code)
 
     # OpenGL
     ######################################################################

@@ -123,7 +123,7 @@ class SidebarItem:
 
     def add_simple(
         self,
-        name: Union[str, TR.V],
+        name: Union[str, TR],
         icon: Union[str, ColoredIcon],
         type: SidebarItemType,
         search_node: Optional[SearchNode],
@@ -270,7 +270,7 @@ class SidebarModel(QAbstractItemModel):
 
 
 class SidebarToolbar(QToolBar):
-    _tools: Tuple[Tuple[SidebarTool, str, TR.V], ...] = (
+    _tools: Tuple[Tuple[SidebarTool, str, TR], ...] = (
         (SidebarTool.SEARCH, ":/icons/magnifying_glass.svg", TR.ACTIONS_SEARCH),
         (SidebarTool.SELECT, ":/icons/select.svg", TR.ACTIONS_SELECT),
     )
@@ -725,7 +725,7 @@
         self,
         *,
         root: SidebarItem,
-        name: TR.V,
+        name: TR,
         icon: Union[str, ColoredIcon],
         collapse_key: Config.Bool.Key.V,
         type: Optional[SidebarItemType] = None,

@@ -67,7 +67,7 @@ def locale_dir() -> str:
     return os.path.join(aqt_data_folder(), "locale")
 
 
-def tr(key: TR.V, **kwargs: Union[str, int, float]) -> str:
+def tr(key: TR, **kwargs: Union[str, int, float]) -> str:
     "Shortcut to access Fluent translations."
     return anki.lang.current_i18n.translate(key, **kwargs)
 

@@ -3,11 +3,11 @@ from anki.lang import TR
 
 
 def test_no_collection_i18n():
-    anki.lang.set_lang("zz", "")
+    anki.lang.set_lang("zz")
     tr2 = anki.lang.current_i18n.translate
     no_uni = anki.lang.without_unicode_isolation
     assert no_uni(tr2(TR.STATISTICS_REVIEWS, reviews=2)) == "2 reviews"
 
-    anki.lang.set_lang("ja", "")
+    anki.lang.set_lang("ja")
     tr2 = anki.lang.current_i18n.translate
     assert no_uni(tr2(TR.STATISTICS_REVIEWS, reviews=2)) == "2 枚の復習カード"

@@ -93,7 +93,6 @@ rust_library(
         "//rslib/cargo:lazy_static",
         "//rslib/cargo:nom",
         "//rslib/cargo:num_enum",
-        "//rslib/cargo:num_format",
         "//rslib/cargo:num_integer",
         "//rslib/cargo:once_cell",
         "//rslib/cargo:pin_project",
@@ -121,6 +120,7 @@ rust_library(
         "//rslib/cargo:unicode_normalization",
         "//rslib/cargo:utime",
         "//rslib/cargo:zip",
+        "//rslib/i18n:anki_i18n",
     ],
 )
 
@@ -128,7 +128,7 @@
 #######################
 
 rust_test(
-    name = "unit_tests",
+    name = "anki_tests",
     compile_data = _anki_compile_data,
     crate = ":anki",
     crate_features = _anki_features,
@@ -136,7 +136,10 @@
         "tests/support/**",
     ]),
     rustc_env = _anki_rustc_env,
-    deps = ["//rslib/cargo:env_logger"],
+    deps = [
+        "//rslib/cargo:env_logger",
+        "//rslib/i18n:anki_i18n",
+    ],
 )
 
 rustfmt_test(
@@ -163,45 +166,8 @@ proto_format(
     srcs = ["backend.proto"],
 )
 
-# fluent.proto generation
-###########################
-# This separate step is required to make the file available to downstream consumers.
-
-rust_binary(
-    name = "write_fluent_proto",
-    srcs = [
-        "build/mergeftl.rs",
-        "build/write_fluent_proto.rs",
-    ],
-    deps = ["//rslib/cargo:fluent_syntax"],
-)
-
-genrule(
-    name = "fluent_proto",
-    srcs = [
-        "//ftl",
-        "//ftl:BUILD.bazel",
-        "//rslib/cargo:fluent_syntax",
-        "@rslib_ftl//:l10n.toml",
-        "@extra_ftl//:l10n.toml",
-    ],
-    outs = ["fluent.proto"],
-    cmd = """\
-RSLIB_FTL_ROOT="$(location @rslib_ftl//:l10n.toml)" \
-EXTRA_FTL_ROOT="$(location @extra_ftl//:l10n.toml)" \
-FTL_SRC="$(location //ftl:BUILD.bazel)" \
-$(location :write_fluent_proto) $(location fluent.proto)""",
-    tools = [
-        ":write_fluent_proto",
-    ],
-    visibility = ["//visibility:public"],
-)
-
-proto_library(
-    name = "fluent_proto_lib",
-    srcs = ["fluent.proto"],
-    visibility = ["//visibility:public"],
-)
+# backend.proto
+#######################
 
 proto_library(
     name = "backend_proto_lib",

@@ -30,6 +30,9 @@ proc-macro-nested = "=0.1.6"
 # as cargo-raze doesn't seem to be included the rustversion crate.
 slog-term = "=2.6.0"
 
+anki_i18n = { path = "i18n" }
+
 askama = "0.10.1"
 async-compression = { version = "0.3.5", features = ["stream", "gzip"] }
 blake3 = "0.3.5"
@@ -47,7 +50,6 @@ itertools = "0.9.0"
 lazy_static = "1.4.0"
 nom = "6.0.1"
 num_enum = "0.5.0"
-num-format = "0.4.0"
 num-integer = "0.1.43"
 once_cell = "1.4.1"
 pin-project = "1"

@@ -1,11 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-pub mod mergeftl;
 pub mod protobuf;
 
 fn main() {
-    mergeftl::write_ftl_files_and_fluent_rs();
     protobuf::write_backend_proto_rs();
 
     // when building with cargo (eg for rust analyzer), generate a dummy BUILDINFO

@@ -1,273 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-use fluent_syntax::ast::Entry;
-use fluent_syntax::parser::Parser;
-use std::path::Path;
-use std::{collections::HashMap, env};
-use std::{fs, path::PathBuf};
-
-fn get_identifiers(ftl_text: &str) -> Vec<String> {
-    let res = Parser::new(ftl_text).parse().unwrap();
-    let mut idents = vec![];
-
-    for entry in res.body {
-        if let Entry::Message(m) = entry {
-            idents.push(m.id.name.to_string());
-        }
-    }
-
-    idents.sort_unstable();
-
-    idents
-}
-
-fn proto_enum(idents: &[String]) -> String {
-    let mut buf = String::from(
-        r#"// This file is automatically generated as part of the build process.
-
-syntax = "proto3";
-package FluentProto;
-enum FluentString {
-"#,
-    );
-    for (idx, s) in idents.iter().enumerate() {
-        let name = s.replace("-", "_").to_uppercase();
-        buf += &format!("  {} = {};\n", name, idx);
-    }
-
-    buf += "}\n";
-
-    buf
-}
-
-fn rust_string_vec(idents: &[String]) -> String {
-    let mut buf = String::from(
-        r#"// This file is automatically generated as part of the build process.
-
-pub(super) const FLUENT_KEYS: &[&str] = &[
-"#,
-    );
-
-    for s in idents {
-        buf += &format!("    \"{}\",\n", s);
-    }
-
-    buf += "];\n";
-
-    buf
-}
-
-struct FTLData {
-    templates: Vec<String>,
-    /// lang -> [FileContent]
-    translations: HashMap<String, Vec<String>>,
-}
-
-impl FTLData {
-    fn add_language_folder(&mut self, folder: &Path) {
-        let lang = folder.file_name().unwrap().to_str().unwrap();
-        let list = self.translations.entry(lang.to_string()).or_default();
-        for entry in fs::read_dir(&folder).unwrap() {
-            let entry = entry.unwrap();
-            let text = fs::read_to_string(&entry.path()).unwrap();
-            assert!(
-                text.ends_with('\n'),
-                "file was missing final newline: {:?}",
-                entry
-            );
-            list.push(text);
-        }
-    }
-
-    fn add_template_folder(&mut self, folder: &Path) {
-        for entry in fs::read_dir(&folder).unwrap() {
-            let entry = entry.unwrap();
-            let text = fs::read_to_string(&entry.path()).unwrap();
-            assert!(
-                text.ends_with('\n'),
-                "file was missing final newline: {:?}",
-                entry
-            );
-            self.templates.push(text);
-        }
-    }
-}
-
-fn get_ftl_data() -> FTLData {
-    let mut data = get_ftl_data_from_source_tree();
-
-    let rslib_l10n = std::env::var("RSLIB_FTL_ROOT").ok();
-    let extra_l10n = std::env::var("EXTRA_FTL_ROOT").ok();
-
-    // core translations provided?
-    if let Some(path) = rslib_l10n {
-        let path = Path::new(&path);
-        let core_folder = path.with_file_name("core");
-        for entry in fs::read_dir(&core_folder).unwrap() {
-            let entry = entry.unwrap();
-            if entry.file_name().to_str().unwrap() == "templates" {
-                // ignore source ftl files, as we've already extracted them from the source tree
-                continue;
-            }
-            data.add_language_folder(&entry.path());
-        }
-    }
-
-    // extra templates/translations provided?
-    if let Some(path) = extra_l10n {
-        let mut path = PathBuf::from(path);
-        // drop l10n.toml filename to get folder
-        path.pop();
-        // look for subfolders
-        for outer_entry in fs::read_dir(&path).unwrap() {
-            let outer_entry = outer_entry.unwrap();
-            if outer_entry.file_type().unwrap().is_dir() {
-                // process folder
-                for entry in fs::read_dir(&outer_entry.path()).unwrap() {
-                    let entry = entry.unwrap();
-                    if entry.file_name().to_str().unwrap() == "templates" {
-                        if include_local_qt_templates() {
-                            // ignore source ftl files, as we've already extracted them from the source tree
-                            continue;
-                        }
-                        data.add_template_folder(&entry.path());
-                    } else {
-                        data.add_language_folder(&entry.path());
-                    }
-                }
-            }
-        }
-    }
-
-    data
-}
-
-/// In a standard build, the ftl/qt folder is used as the source
-/// of truth for @extra_ftl, making it easier to add new strings.
-/// If the Qt templates are not desired, the NO_QT_TEMPLATES env
-/// var can be set to skip them.
-fn include_local_qt_templates() -> bool {
-    env::var("NO_QT_TEMPLATES").is_err()
-}
-
-/// Extracts English text from ftl folder in source tree.
-fn get_ftl_data_from_source_tree() -> FTLData {
-    let mut templates: Vec<String> = vec![];
-
-    let ftl_base = if let Ok(srcfile) = env::var("FTL_SRC") {
-        let mut path = PathBuf::from(srcfile);
-        path.pop();
-        path
-    } else {
-        PathBuf::from("../ftl")
-    };
-
-    let dir = ftl_base.join("core");
-    for entry in fs::read_dir(dir).unwrap() {
-        let entry = entry.unwrap();
-        let fname = entry.file_name().into_string().unwrap();
-        if fname.ends_with(".ftl") {
-            templates.push(fs::read_to_string(entry.path()).unwrap());
-        }
-    }
-
-    if include_local_qt_templates() {
-        let dir = ftl_base.join("qt");
-        for entry in fs::read_dir(dir).unwrap() {
-            let entry = entry.unwrap();
-            let fname = entry.file_name().into_string().unwrap();
-            if fname.ends_with(".ftl") {
-                templates.push(fs::read_to_string(entry.path()).unwrap());
-            }
-        }
-    }
-
-    FTLData {
-        templates,
-        translations: Default::default(),
-    }
-}
-
-/// Map of lang->content; Template lang is "template".
-fn merge_ftl_data(data: FTLData) -> HashMap<String, String> {
-    data.translations
-        .into_iter()
-        .map(|(lang, content)| (lang, content.join("\n")))
-        .chain(std::iter::once((
-            "template".to_string(),
-            data.templates.join("\n"),
-        )))
-        .collect()
-}
-
-fn write_merged_ftl_files(dir: &Path, data: &HashMap<String, String>) {
-    for (lang, content) in data {
-        let path = dir.join(format!("{}.ftl", lang));
-        fs::write(&path, content).unwrap();
-    }
-}
-
-fn write_fluent_keys_rs(dir: &Path, idents: &[String]) {
-    let path = dir.join("fluent_keys.rs");
-    fs::write(&path, rust_string_vec(idents)).unwrap();
-}
-
-fn write_fluent_proto_inner(path: &Path, idents: &[String]) {
-    fs::write(&path, proto_enum(idents)).unwrap();
-}
-
-/// Write fluent.proto into the provided dir.
-/// Can be called separately to provide a proto
-/// to downstream code.
-pub fn write_fluent_proto(out_path: &str) {
-    let merged_ftl = merge_ftl_data(get_ftl_data());
-    let idents = get_identifiers(merged_ftl.get("template").unwrap());
-    write_fluent_proto_inner(Path::new(out_path), &idents);
-}
-
-/// Write all ftl-related files into OUT_DIR.
-pub fn write_ftl_files_and_fluent_rs() {
-    let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
-    let merged_ftl = merge_ftl_data(get_ftl_data());
-    write_merged_ftl_files(&dir, &merged_ftl);
-
-    let idents = get_identifiers(merged_ftl.get("template").unwrap());
-    write_fluent_keys_rs(&dir, &idents);
-    write_fluent_proto_inner(&dir.join("fluent.proto"), &idents);
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    #[test]
-    fn all() {
-        let idents = get_identifiers("key-one = foo\nkey-two = bar");
-        assert_eq!(idents, vec!["key-one", "key-two"]);
-
-        assert_eq!(
-            proto_enum(&idents),
-            r#"// This file is automatically generated as part of the build process.
-
-syntax = "proto3";
-package backend_strings;
-enum FluentString {
-  KEY_ONE = 0;
-  KEY_TWO = 1;
-}
-"#
-        );
-
-        assert_eq!(
-            rust_string_vec(&idents),
-            r#"// This file is automatically generated as part of the build process.
-
-const FLUENT_KEYS: &[&str] = &[
-    "key-one",
-    "key-two",
-];
-"#
-        );
-    }
-}

@@ -83,12 +83,11 @@ pub fn write_backend_proto_rs() {
         backend_proto = PathBuf::from("backend.proto");
         proto_dir = PathBuf::from(".");
     }
-    let fluent_proto = out_dir.join("fluent.proto");
 
     let mut config = prost_build::Config::new();
     config
         .out_dir(&out_dir)
         .service_generator(service_generator())
-        .compile_protos(&[&backend_proto, &fluent_proto], &[&proto_dir, &out_dir])
+        .compile_protos(&[&backend_proto], &[&proto_dir, &out_dir])
         .unwrap();
 }

@@ -1,9 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-include!("mergeftl.rs");
-
-fn main() {
-    let args: Vec<_> = std::env::args().collect();
-    write_fluent_proto(&args[1]);
-}

@@ -201,15 +201,6 @@ alias(
     ],
 )
 
-alias(
-    name = "num_format",
-    actual = "@raze__num_format__0_4_0//:num_format",
-    tags = [
-        "cargo-raze",
-        "manual",
-    ],
-)
-
 alias(
     name = "num_integer",
     actual = "@raze__num_integer__0_1_44//:num_integer",

rslib/i18n/BUILD.bazel | 102 (new file)

@@ -0,0 +1,102 @@
+# Copyright: Ankitects Pty Ltd and contributors
+# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+load("@io_bazel_rules_rust//rust:rust.bzl", "rust_binary", "rust_library", "rust_test")
+load("@io_bazel_rules_rust//cargo:cargo_build_script.bzl", "cargo_build_script")
+load("//rslib:rustfmt.bzl", "rustfmt_fix", "rustfmt_test")
+
+# Build script
+#######################
+
+cargo_build_script(
+    name = "build_script",
+    srcs = glob(["build/*.rs"]),
+    build_script_env = {
+        "RSLIB_FTL_ROOT": "$(location @rslib_ftl//:l10n.toml)",
+        "EXTRA_FTL_ROOT": "$(location @extra_ftl//:l10n.toml)",
+    },
+    crate_root = "build/main.rs",
+    data = [
+        "//ftl",
+        # bazel requires us to list these out separately
+        "@rslib_ftl//:l10n.toml",
+        "@extra_ftl//:l10n.toml",
+    ],
+    deps = [
+        "//rslib/i18n/cargo:fluent",
+        "//rslib/i18n/cargo:fluent_syntax",
+        "//rslib/i18n/cargo:inflections",
+        "//rslib/i18n/cargo:serde",
+        "//rslib/i18n/cargo:serde_json",
+        "//rslib/i18n/cargo:unic_langid",
+    ],
+)
+
+# Library
+#######################
+
+rust_library(
+    name = "anki_i18n",
+    srcs = glob([
+        "src/**/*.rs",
+    ]),
+    visibility = ["//rslib:__subpackages__"],
+    deps = [
+        ":build_script",
+        "//rslib/i18n/cargo:fluent",
+        "//rslib/i18n/cargo:intl_memoizer",
+        "//rslib/i18n/cargo:num_format",
+        "//rslib/i18n/cargo:phf",
+        "//rslib/i18n/cargo:serde",
+        "//rslib/i18n/cargo:serde_json",
+        "//rslib/i18n/cargo:unic_langid",
+    ],
+)
+
+# Tests
+#######################
+
+rust_test(
+    name = "i18n_tests",
+    crate = ":anki_i18n",
+)
+
+rustfmt_test(
+    name = "format_check",
+    srcs = glob([
+        "**/*.rs",
+    ]),
+)
+
+rustfmt_fix(
+    name = "format",
+    srcs = glob([
+        "**/*.rs",
+    ]),
+)
+
+# strings.json copying
+###########################
+# This separate binary is used to copy the generated strings.json into another location
+# for downstream consumers
+
+rust_binary(
+    name = "write_json",
+    srcs = [
+        "build/write_json.rs",
+    ],
+    deps = [
+        ":build_script",
+    ],
+)
+
+genrule(
+    name = "strings_json",
+    outs = ["strings.json"],
+    cmd = """\
+$(location :write_json) $(location strings.json)""",
+    tools = [
+        ":write_json",
+    ],
+    visibility = ["//visibility:public"],
+)

rslib/i18n/Cargo.toml | 35 (new file)

@@ -0,0 +1,35 @@
+[package]
+name = "anki_i18n"
+version = "0.0.0"
+edition = "2018"
+authors = ["Ankitects Pty Ltd and contributors"]
+license = "AGPL-3.0-or-later"
+description = "Anki's Rust library i18n code"
+build = "build/main.rs"
+
+[lib]
+name = "anki_i18n"
+path = "src/lib.rs"
+
+[[bin]]
+name = "write_json"
+path = "build/write_json.rs"
+
+# After updating anything below, run ../cargo/update.py
+
+[build-dependencies]
+fluent-syntax = "0.10"
+fluent = "0.13.1"
+unic-langid = { version = "0.9", features = ["macros"] }
+serde = { version = "1.0.114", features = ["derive"] }
+serde_json = "1.0.56"
+inflections = "1.1.1"
+
+[dependencies]
+phf = { version = "0.8", features = ["macros"] }
+fluent = "0.13.1"
+num-format = "0.4.0"
+unic-langid = { version = "0.9", features = ["macros"] }
+serde = { version = "1.0.114", features = ["derive"] }
+serde_json = "1.0.56"
+intl-memoizer = "0.5"

rslib/i18n/build/check.rs | 31 (new file)

@@ -0,0 +1,31 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+//! Check the .ftl files at build time to ensure we don't get runtime load failures.
+
+use super::gather::TranslationsByLang;
+use fluent::{FluentBundle, FluentResource};
+use unic_langid::LanguageIdentifier;
+
+pub fn check(lang_map: &TranslationsByLang) {
+    for (lang, files_map) in lang_map {
+        for (fname, content) in files_map {
+            check_content(lang, fname, content);
+        }
+    }
+}
+
+fn check_content(lang: &str, fname: &str, content: &str) {
+    let lang_id: LanguageIdentifier = "en-US".parse().unwrap();
+    let resource = FluentResource::try_new(content.into()).unwrap_or_else(|e| {
+        panic!("{}\nUnable to parse {}/{}: {:?}", content, lang, fname, e);
+    });
+
+    let mut bundle: FluentBundle<FluentResource> = FluentBundle::new(&[lang_id]);
+    bundle.add_resource(resource).unwrap_or_else(|e| {
+        panic!(
+            "{}\nUnable to bundle - duplicate key? {}/{}: {:?}",
+            content, lang, fname, e
+        );
+    });
+}
115
rslib/i18n/build/extract.rs
Normal file
115
rslib/i18n/build/extract.rs
Normal file
|
|
@ -0,0 +1,115 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use crate::gather::TranslationsByLang;
use fluent_syntax::ast::{Entry, Expression, InlineExpression, Pattern, PatternElement};
use fluent_syntax::parser::Parser;
use serde::Serialize;
use std::{collections::HashSet, fmt::Write};

#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Serialize)]
pub struct Module {
    pub name: String,
    pub translations: Vec<Translation>,
    pub index: usize,
}

#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Serialize)]
pub struct Translation {
    pub key: String,
    pub text: String,
    pub variables: Vec<String>,
    pub index: usize,
}

pub fn get_modules(data: &TranslationsByLang) -> Vec<Module> {
    let mut output = vec![];

    for (module, text) in &data["templates"] {
        output.push(Module {
            name: module.to_string(),
            translations: extract_metadata(text),
            index: 0,
        });
    }

    output.sort_unstable();

    for (module_idx, module) in output.iter_mut().enumerate() {
        module.index = module_idx;
        for (entry_idx, entry) in module.translations.iter_mut().enumerate() {
            entry.index = entry_idx;
        }
    }

    output
}

fn extract_metadata(ftl_text: &str) -> Vec<Translation> {
    let res = Parser::new(ftl_text).parse().unwrap();
    let mut output = vec![];

    for entry in res.body {
        if let Entry::Message(m) = entry {
            if let Some(pattern) = m.value {
                let mut visitor = Visitor::default();
                visitor.visit_pattern(&pattern);
                let key = m.id.name.to_string();

                let (text, variables) = visitor.into_output();

                output.push(Translation {
                    key,
                    text,
                    variables,
                    index: 0,
                })
            }
        }
    }

    output.sort_unstable();

    output
}

/// Gather variable names and (rough) text from Fluent AST.
#[derive(Default)]
struct Visitor {
    text: String,
    variables: HashSet<String>,
}

impl Visitor {
    fn into_output(self) -> (String, Vec<String>) {
        let mut vars: Vec<_> = self.variables.into_iter().collect();
        vars.sort_unstable();
        (self.text, vars)
    }

    fn visit_pattern(&mut self, pattern: &Pattern<&str>) {
        for element in &pattern.elements {
            match element {
                PatternElement::TextElement { value } => self.text.push_str(value),
                PatternElement::Placeable { expression } => self.visit_expression(expression),
            }
        }
    }

    fn visit_expression(&mut self, expression: &Expression<&str>) {
        match expression {
            Expression::SelectExpression { variants, .. } => {
                self.visit_pattern(&variants.last().unwrap().value)
            }
            Expression::InlineExpression(expr) => match expr {
                InlineExpression::VariableReference { id } => {
                    write!(self.text, "${}", id.name).unwrap();
                    self.variables.insert(id.name.to_string());
                }
                InlineExpression::Placeable { expression } => {
                    self.visit_expression(expression);
                }
                _ => {}
            },
        }
    }
}
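The visitor above only produces a rough rendering of each message: variable references are flattened to $name, and a select expression is collapsed to its last (default) variant. A minimal sketch of the expected output, assuming it sat alongside extract.rs, and reusing the sample strings from lib.rs's tests below:

#[cfg(test)]
mod extract_sketch {
    use super::extract_metadata;

    #[test]
    fn flattens_patterns() {
        let ftl = "
two-args-key = two args: {$one} and {$two}
plural = You have {$hats ->
    [one] 1 hat
   *[other] {$hats} hats
}.
";
        let translations = extract_metadata(ftl);
        // entries are sorted by key, so "plural" comes before "two-args-key";
        // the select expression collapses to its *[other] variant
        assert_eq!(translations[0].text, "You have $hats hats.");
        // variable names are collected from references and sorted
        assert_eq!(translations[1].variables, vec!["one", "two"]);
    }
}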
123 rslib/i18n/build/gather.rs Normal file

@@ -0,0 +1,123 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

//! Env vars that control behaviour:
//! - FTL_SRC can be pointed at /ftl/BUILD.bazel to tell the script where the translations
//!   in the source tree can be found. If not set (when building from cargo), the script
//!   will look in the parent folders instead.
//! - RSLIB_FTL_ROOT should be set to the l10n.toml file inside the core translation repo.
//! - EXTRA_FTL_ROOT should be set to the l10n.toml file inside the qt translation repo.
//! - If NO_QT_TEMPLATES is set, EXTRA_FTL_ROOT can be pointed at an l10n.toml file in a separate
//!   location, to include files from there. In this case, the standard Qt templates will not
//!   be included from the source tree.

use std::path::Path;
use std::{collections::HashMap, env};
use std::{fs, path::PathBuf};

pub type TranslationsByFile = HashMap<String, String>;
pub type TranslationsByLang = HashMap<String, TranslationsByFile>;

/// Read the contents of the FTL files into a TranslationMap structure.
pub fn get_ftl_data() -> TranslationsByLang {
    let mut map = TranslationsByLang::default();
    let include_qt = include_local_qt_templates();

    // English templates first
    let ftl_base = source_tree_root();
    add_folder(&mut map, &ftl_base.join("core"), "templates");
    if include_qt {
        add_folder(&mut map, &ftl_base.join("qt"), "templates");
    }

    // Core translations provided?
    if let Some(path) = core_ftl_root() {
        add_translation_root(&mut map, &path, true);
    }

    // Extra templates/translations provided?
    if let Some(path) = extra_ftl_root() {
        add_translation_root(&mut map, &path, include_qt);
    }

    map
}

/// For each .ftl file in the provided folder, add it to the translation map.
fn add_folder(map: &mut TranslationsByLang, folder: &Path, lang: &str) {
    let map_entry = map.entry(lang.to_string()).or_default();
    for entry in fs::read_dir(&folder).unwrap() {
        let entry = entry.unwrap();
        let fname = entry.file_name().to_string_lossy().to_string();
        if !fname.ends_with(".ftl") {
            continue;
        }
        let module = fname.trim_end_matches(".ftl").replace("-", "_");
        let text = fs::read_to_string(&entry.path()).unwrap();
        assert!(
            text.ends_with('\n'),
            "file was missing final newline: {:?}",
            entry
        );
        map_entry.entry(module).or_default().push_str(&text);
    }
}

/// For each language folder in `root`, add the ftl files stored inside.
/// If ignore_templates is true, the templates/ folder will be ignored, on the
/// assumption the templates were extracted from the source tree.
fn add_translation_root(map: &mut TranslationsByLang, root: &Path, ignore_templates: bool) {
    for entry in fs::read_dir(root).unwrap() {
        let entry = entry.unwrap();
        let lang = entry.file_name().to_string_lossy().to_string();
        if ignore_templates && lang == "templates" {
            continue;
        }
        add_folder(map, &entry.path(), &lang);
    }
}

/// In a standard build, the ftl/qt folder is used as the source
/// of truth for @extra_ftl, making it easier to add new strings.
/// If the Qt templates are not desired, the NO_QT_TEMPLATES env
/// var can be set to skip them.
fn include_local_qt_templates() -> bool {
    env::var("NO_QT_TEMPLATES").is_err()
}

fn source_tree_root() -> PathBuf {
    if let Ok(srcfile) = env::var("FTL_SRC") {
        let mut path = PathBuf::from(srcfile);
        path.pop();
        path
    } else {
        PathBuf::from("../../ftl")
    }
}

fn core_ftl_root() -> Option<PathBuf> {
    std::env::var("RSLIB_FTL_ROOT")
        .ok()
        .map(first_folder_next_to_l10n_file)
}

fn extra_ftl_root() -> Option<PathBuf> {
    std::env::var("EXTRA_FTL_ROOT")
        .ok()
        .map(first_folder_next_to_l10n_file)
}

fn first_folder_next_to_l10n_file(l10n_path: String) -> PathBuf {
    // drop the filename
    let mut path = PathBuf::from(&l10n_path);
    path.pop();
    // iterate over the folder
    for entry in path.read_dir().unwrap() {
        let entry = entry.unwrap();
        if entry.metadata().unwrap().is_dir() {
            // return the first folder we find
            return entry.path();
        }
    }
    panic!("no folder found in {}", l10n_path);
}
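Everything funnels into one nested map: language -> module -> concatenated .ftl source, with the English source tree stored under the "templates" pseudo-language, and the module name derived from the file name by the trim/replace rule above. A small sketch of walking the result (illustrative only):

use crate::gather::TranslationsByLang;

// Walk the map produced by get_ftl_data() above (illustrative only).
fn dump_template_modules(map: &TranslationsByLang) {
    // "templates" is the pseudo-language holding the English source strings
    if let Some(modules) = map.get("templates") {
        for (module, text) in modules {
            // module is e.g. "deck_config", derived from a deck-config.ftl file name
            println!("{}: {} bytes of ftl", module, text.len());
        }
    }
}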
30 rslib/i18n/build/main.rs Normal file

@@ -0,0 +1,30 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

mod check;
mod extract;
mod gather;
mod write_strings;

use std::{fs, path::PathBuf};

use check::check;
use extract::get_modules;
use gather::get_ftl_data;
use write_strings::write_strings;

// fixme: check all variables are present in translations as well?

fn main() {
    // generate our own requirements
    let map = get_ftl_data();
    check(&map);
    let modules = get_modules(&map);
    write_strings(&map, &modules);

    // put a json file into OUT_DIR that the write_json tool can read
    let meta_json = serde_json::to_string_pretty(&modules).unwrap();
    let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
    let path = dir.join("strings.json");
    fs::write(path, meta_json).unwrap();
}
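Because main() writes the modules with serde_json, the strings.json file mirrors the Module/Translation structs exactly. A sketch of a downstream consumer reading it back (the structs here are assumed mirrors, since the build crate only derives Serialize; serde/serde_json are assumed available):

use serde::Deserialize;

#[derive(Deserialize)]
pub struct Translation {
    pub key: String,
    pub text: String,
    pub variables: Vec<String>,
    pub index: usize,
}

#[derive(Deserialize)]
pub struct Module {
    pub name: String,
    pub translations: Vec<Translation>,
    pub index: usize,
}

pub fn read_strings_json(path: &std::path::Path) -> Vec<Module> {
    let data = std::fs::read_to_string(path).expect("strings.json should exist");
    serde_json::from_str(&data).expect("strings.json should match Module layout")
}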
16 rslib/i18n/build/write_json.rs Normal file

@@ -0,0 +1,16 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::{
    env, fs,
    path::{Path, PathBuf},
};

pub fn main() {
    let args: Vec<_> = env::args().collect();
    let target_file = Path::new(args.get(1).expect("output path not provided"));

    let dir = PathBuf::from(env!("OUT_DIR"));
    let path = dir.join("strings.json");
    fs::copy(path, target_file).unwrap();
}
133 rslib/i18n/build/write_strings.rs Normal file

@@ -0,0 +1,133 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

//! Write strings to a strings.rs file that will be compiled into the binary.

use inflections::Inflect;
use std::{fmt::Write, fs, path::PathBuf};

use crate::{
    extract::Module,
    gather::{TranslationsByFile, TranslationsByLang},
};

pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
    let mut buf = String::new();

    // lang->module map
    write_lang_map(map, &mut buf);
    // module name->translations
    write_translations_by_module(map, &mut buf);
    // ordered list of translations by module
    write_translation_key_index(modules, &mut buf);
    write_legacy_tr_enum(modules, &mut buf);

    let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
    let path = dir.join("strings.rs");
    fs::write(&path, buf).unwrap();
}

fn write_legacy_tr_enum(modules: &[Module], buf: &mut String) {
    buf.push_str("pub enum LegacyKey {\n");
    for module in modules {
        for translation in &module.translations {
            let key = translation.key.to_pascal_case();
            let number = module.index * 1000 + translation.index;
            writeln!(buf, r#"    {key} = {number},"#, key = key, number = number).unwrap();
        }
    }

    buf.push_str("}\n");
}

fn write_translation_key_index(modules: &[Module], buf: &mut String) {
    for module in modules {
        writeln!(
            buf,
            "pub(crate) const {key}: [&str; {count}] = [",
            key = module_constant_name(&module.name),
            count = module.translations.len(),
        )
        .unwrap();

        for translation in &module.translations {
            writeln!(buf, r#"    "{key}","#, key = translation.key).unwrap();
        }

        buf.push_str("];\n")
    }

    writeln!(
        buf,
        "pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
        count = modules.len(),
    )
    .unwrap();

    for module in modules {
        writeln!(
            buf,
            r#"    &{module_slice},"#,
            module_slice = module_constant_name(&module.name)
        )
        .unwrap();
    }

    buf.push_str("];\n")
}

fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
    buf.push_str(
        "
pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
",
    );

    for lang in map.keys() {
        writeln!(
            buf,
            r#"    "{lang}" => &{constant},"#,
            lang = lang,
            constant = lang_constant_name(lang)
        )
        .unwrap();
    }

    buf.push_str("};\n");
}

fn write_translations_by_module(map: &TranslationsByLang, buf: &mut String) {
    for (lang, modules) in map {
        write_module_map(buf, lang, modules);
    }
}

fn write_module_map(buf: &mut String, lang: &str, modules: &TranslationsByFile) {
    writeln!(
        buf,
        "
pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
        lang_name = lang_constant_name(lang)
    )
    .unwrap();

    for (module, contents) in modules {
        writeln!(
            buf,
            r###"    "{module}" => r##"{contents}"##,"###,
            module = module,
            contents = contents
        )
        .unwrap();
    }

    buf.push_str("};\n");
}

fn lang_constant_name(lang: &str) -> String {
    lang.to_ascii_uppercase().replace("-", "_")
}

fn module_constant_name(module: &str) -> String {
    format!("{}_KEYS", module.to_ascii_uppercase())
}
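write_legacy_tr_enum() keeps add-ons working by packing each discriminant as module.index * 1000 + translation.index, which implicitly caps a module at 1000 entries. The runtime side reverses this in get_key_legacy() (see lib.rs below); a round-trip sketch:

// Encoding used by the generated LegacyKey enum above.
fn encode(module_idx: usize, translation_idx: usize) -> usize {
    module_idx * 1000 + translation_idx
}

// Decoding as performed by get_key_legacy() in rslib/i18n/src/lib.rs.
fn decode(val: usize) -> (usize, usize) {
    (val / 1000, val % 1000)
}

fn main() {
    assert_eq!(decode(encode(3, 42)), (3, 42));
}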
103 rslib/i18n/cargo/BUILD.bazel Normal file

@@ -0,0 +1,103 @@
"""
@generated
cargo-raze generated Bazel file.

DO NOT EDIT! Replaced on runs of cargo-raze
"""

package(default_visibility = ["//visibility:public"])

licenses([
    "notice",  # See individual crates for specific licenses
])

# Aliased targets
alias(
    name = "fluent",
    actual = "@raze__fluent__0_13_1//:fluent",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "fluent_syntax",
    actual = "@raze__fluent_syntax__0_10_3//:fluent_syntax",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "inflections",
    actual = "@raze__inflections__1_1_1//:inflections",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "intl_memoizer",
    actual = "@raze__intl_memoizer__0_5_1//:intl_memoizer",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "num_format",
    actual = "@raze__num_format__0_4_0//:num_format",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "phf",
    actual = "@raze__phf__0_8_0//:phf",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "serde",
    actual = "@raze__serde__1_0_124//:serde",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "serde_derive",
    actual = "@raze__serde_derive__1_0_124//:serde_derive",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "serde_json",
    actual = "@raze__serde_json__1_0_64//:serde_json",
    tags = [
        "cargo-raze",
        "manual",
    ],
)

alias(
    name = "unic_langid",
    actual = "@raze__unic_langid__0_9_0//:unic_langid",
    tags = [
        "cargo-raze",
        "manual",
    ],
)
5 rslib/i18n/src/generated.rs Normal file

@@ -0,0 +1,5 @@
// Include auto-generated content

#![allow(clippy::all)]

include!(concat!(env!("OUT_DIR"), "/strings.rs"));
491 rslib/i18n/src/lib.rs Normal file

@@ -0,0 +1,491 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

mod generated;

use fluent::{concurrent::FluentBundle, FluentArgs, FluentResource, FluentValue};
use num_format::Locale;
use serde::Serialize;
use std::borrow::Cow;
use std::sync::{Arc, Mutex};
use unic_langid::LanguageIdentifier;

use generated::{KEYS_BY_MODULE, STRINGS};

pub use generated::LegacyKey as TR;

pub use fluent::fluent_args as tr_args;

/// Helper for creating args with &strs
#[macro_export]
macro_rules! tr_strs {
    ( $($key:expr => $value:expr),* ) => {
        {
            let mut args: fluent::FluentArgs = fluent::FluentArgs::new();
            $(
                args.add($key, $value.to_string().into());
            )*
            args
        }
    };
}

fn remapped_lang_name(lang: &LanguageIdentifier) -> &str {
    let region = match &lang.region {
        Some(region) => Some(region.as_str()),
        None => None,
    };
    match lang.language.as_str() {
        "en" => {
            match region {
                Some("GB") | Some("AU") => "en-GB",
                // go directly to fallback
                _ => "templates",
            }
        }
        "zh" => match region {
            Some("TW") | Some("HK") => "zh-TW",
            _ => "zh-CN",
        },
        "pt" => {
            if let Some("PT") = region {
                "pt-PT"
            } else {
                "pt-BR"
            }
        }
        "ga" => "ga-IE",
        "hy" => "hy-AM",
        "nb" => "nb-NO",
        "sv" => "sv-SE",
        other => other,
    }
}

/// Some sample text for testing purposes.
fn test_en_text() -> &'static str {
    "
valid-key = a valid key
only-in-english = not translated
two-args-key = two args: {$one} and {$two}
plural = You have {$hats ->
    [one] 1 hat
   *[other] {$hats} hats
}.
"
}

fn test_jp_text() -> &'static str {
    "
valid-key = キー
two-args-key = {$one}と{$two}
"
}

fn test_pl_text() -> &'static str {
    "
one-arg-key = fake Polish {$one}
"
}

/// Parse resource text into an AST for inclusion in a bundle.
/// Returns None if text contains errors.
/// extra_text may contain resources loaded from the filesystem
/// at runtime. If it contains errors, they will not prevent a
/// bundle from being returned.
fn get_bundle(
    text: &str,
    extra_text: String,
    locales: &[LanguageIdentifier],
) -> Option<FluentBundle<FluentResource>> {
    let res = FluentResource::try_new(text.into())
        .map_err(|e| {
            println!("Unable to parse translations file: {:?}", e);
        })
        .ok()?;

    let mut bundle: FluentBundle<FluentResource> = FluentBundle::new(locales);
    bundle
        .add_resource(res)
        .map_err(|e| {
            println!("Duplicate key detected in translation file: {:?}", e);
        })
        .ok()?;

    if !extra_text.is_empty() {
        match FluentResource::try_new(extra_text) {
            Ok(res) => bundle.add_resource_overriding(res),
            Err((_res, e)) => println!("Unable to parse translations file: {:?}", e),
        }
    }

    // add numeric formatter
    set_bundle_formatter_for_langs(&mut bundle, locales);

    Some(bundle)
}

/// Get a bundle that includes any filesystem overrides.
fn get_bundle_with_extra(
    text: &str,
    lang: Option<LanguageIdentifier>,
) -> Option<FluentBundle<FluentResource>> {
    let mut extra_text = "".into();
    if cfg!(test) {
        // inject some test strings in test mode
        match &lang {
            None => {
                extra_text += test_en_text();
            }
            Some(lang) if lang.language == "ja" => {
                extra_text += test_jp_text();
            }
            Some(lang) if lang.language == "pl" => {
                extra_text += test_pl_text();
            }
            _ => {}
        }
    }

    let mut locales = if let Some(lang) = lang {
        vec![lang]
    } else {
        vec![]
    };
    locales.push("en-US".parse().unwrap());

    get_bundle(text, extra_text, &locales)
}

#[derive(Clone)]
pub struct I18n {
    inner: Arc<Mutex<I18nInner>>,
}

fn get_key_legacy(val: usize) -> &'static str {
    let (module_idx, translation_idx) = (val / 1000, val % 1000);
    get_key(module_idx, translation_idx)
}

fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
    KEYS_BY_MODULE
        .get(module_idx)
        .and_then(|translations| translations.get(translation_idx))
        .cloned()
        .unwrap_or("invalid-module-or-translation-index")
}

impl I18n {
    pub fn template_only() -> Self {
        Self::new::<&str>(&[])
    }

    pub fn new<S: AsRef<str>>(locale_codes: &[S]) -> Self {
        let mut input_langs = vec![];
        let mut bundles = Vec::with_capacity(locale_codes.len() + 1);
        let mut resource_text = vec![];

        for code in locale_codes {
            let code = code.as_ref();
            if let Ok(lang) = code.parse::<LanguageIdentifier>() {
                input_langs.push(lang.clone());
                if lang.language == "en" {
                    // if English was listed, any further preferences are skipped,
                    // as the template has 100% coverage, and we need to ensure
                    // it is tried prior to any other langs.
                    break;
                }
            }
        }

        let mut output_langs = vec![];
        for lang in input_langs {
            // if the language is bundled in the binary
            if let Some(text) = ftl_localized_text(&lang).or_else(|| {
                // when testing, allow missing translations
                if cfg!(test) {
                    Some(String::new())
                } else {
                    None
                }
            }) {
                if let Some(bundle) = get_bundle_with_extra(&text, Some(lang.clone())) {
                    resource_text.push(text);
                    bundles.push(bundle);
                    output_langs.push(lang);
                } else {
                    println!("Failed to create bundle for {:?}", lang.language)
                }
            }
        }

        // add English templates
        let template_lang = "en-US".parse().unwrap();
        let template_text = ftl_localized_text(&template_lang).unwrap();
        let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
        resource_text.push(template_text);
        bundles.push(template_bundle);
        output_langs.push(template_lang);

        if locale_codes.is_empty() || cfg!(test) {
            // disable isolation characters in test mode
            for bundle in &mut bundles {
                bundle.set_use_isolating(false);
            }
        }

        Self {
            inner: Arc::new(Mutex::new(I18nInner {
                bundles,
                langs: output_langs,
                resource_text,
            })),
        }
    }

    /// Get translation with zero arguments.
    pub fn tr(&self, key: TR) -> Cow<str> {
        let key = get_key_legacy(key as usize);
        self.tr_(key, None)
    }

    /// Get translation with one or more arguments.
    pub fn trn(&self, key: TR, args: FluentArgs) -> String {
        let key = get_key_legacy(key as usize);
        self.tr_(key, Some(args)).into()
    }

    pub fn trn2(&self, key: usize, args: FluentArgs) -> String {
        let key = get_key_legacy(key);
        self.tr_(key, Some(args)).into()
    }

    fn tr_<'a>(&'a self, key: &str, args: Option<FluentArgs>) -> Cow<'a, str> {
        for bundle in &self.inner.lock().unwrap().bundles {
            let msg = match bundle.get_message(key) {
                Some(msg) => msg,
                // not translated in this bundle
                None => continue,
            };

            let pat = match msg.value {
                Some(val) => val,
                // empty value
                None => continue,
            };

            let mut errs = vec![];
            let out = bundle.format_pattern(pat, args.as_ref(), &mut errs);
            if !errs.is_empty() {
                println!("Error(s) in translation '{}': {:?}", key, errs);
            }
            // clone so we can discard args
            return out.to_string().into();
        }

        // return the key name if it was missing
        key.to_string().into()
    }

    /// Return text from configured locales for use with the JS Fluent implementation.
    pub fn resources_for_js(&self) -> ResourcesForJavascript {
        let inner = self.inner.lock().unwrap();
        ResourcesForJavascript {
            langs: inner.langs.iter().map(ToString::to_string).collect(),
            resources: inner.resource_text.clone(),
        }
    }
}

/// This temporarily behaves like the older code; in the future we could either
/// access each &str separately, or load them on demand.
fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
    let lang = remapped_lang_name(lang);
    if let Some(module) = STRINGS.get(lang) {
        let mut text = String::new();
        for module_text in module.values() {
            text.push_str(module_text)
        }
        Some(text)
    } else {
        None
    }
}

struct I18nInner {
    // bundles in preferred language order, with template English as the
    // last element
    bundles: Vec<FluentBundle<FluentResource>>,
    langs: Vec<LanguageIdentifier>,
    // fixme: this is a relic from the old implementation, and we could gather
    // it only when needed in the future
    resource_text: Vec<String>,
}

// Simple number formatting implementation

fn set_bundle_formatter_for_langs<T>(bundle: &mut FluentBundle<T>, langs: &[LanguageIdentifier]) {
    let formatter = if want_comma_as_decimal_separator(langs) {
        format_decimal_with_comma
    } else {
        format_decimal_with_period
    };

    bundle.set_formatter(Some(formatter));
}

fn first_available_num_format_locale(langs: &[LanguageIdentifier]) -> Option<Locale> {
    for lang in langs {
        if let Some(locale) = num_format_locale(lang) {
            return Some(locale);
        }
    }
    None
}

// try to locate a num_format locale for a given language identifier
fn num_format_locale(lang: &LanguageIdentifier) -> Option<Locale> {
    // region provided?
    if let Some(region) = lang.region {
        let code = format!("{}_{}", lang.language, region);
        if let Ok(locale) = Locale::from_name(code) {
            return Some(locale);
        }
    }
    // try the language alone
    Locale::from_name(lang.language.as_str()).ok()
}

fn want_comma_as_decimal_separator(langs: &[LanguageIdentifier]) -> bool {
    let separator = if let Some(locale) = first_available_num_format_locale(langs) {
        locale.decimal()
    } else {
        "."
    };

    separator == ","
}

fn format_decimal_with_comma(
    val: &fluent::FluentValue,
    _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Option<String> {
    format_number_values(val, Some(","))
}

fn format_decimal_with_period(
    val: &fluent::FluentValue,
    _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Option<String> {
    format_number_values(val, None)
}

#[inline]
fn format_number_values(
    val: &fluent::FluentValue,
    alt_separator: Option<&'static str>,
) -> Option<String> {
    match val {
        FluentValue::Number(num) => {
            // create a string with desired maximum digits
            let max_frac_digits = 2;
            let with_max_precision = format!(
                "{number:.precision$}",
                number = num.value,
                precision = max_frac_digits
            );

            // remove any excess trailing zeros
            let mut val: Cow<str> = with_max_precision.trim_end_matches('0').into();

            // adding back any required to meet minimum_fraction_digits
            if let Some(minfd) = num.options.minimum_fraction_digits {
                let pos = val.find('.').expect("expected . in formatted string");
                let frac_num = val.len() - pos - 1;
                let zeros_needed = minfd - frac_num;
                if zeros_needed > 0 {
                    val = format!("{}{}", val, "0".repeat(zeros_needed)).into();
                }
            }

            // lop off any trailing '.'
            let result = val.trim_end_matches('.');

            if let Some(sep) = alt_separator {
                Some(result.replace('.', sep))
            } else {
                Some(result.to_string())
            }
        }
        _ => None,
    }
}

#[derive(Serialize)]
pub struct ResourcesForJavascript {
    langs: Vec<String>,
    resources: Vec<String>,
}

#[cfg(test)]
mod test {
    use super::*;
    use unic_langid::langid;

    #[test]
    fn numbers() {
        assert_eq!(want_comma_as_decimal_separator(&[langid!("en-US")]), false);
        assert_eq!(want_comma_as_decimal_separator(&[langid!("pl-PL")]), true);
    }

    #[test]
    fn i18n() {
        // English template
        let i18n = I18n::new(&["zz"]);
        assert_eq!(i18n.tr_("valid-key", None), "a valid key");
        assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");

        assert_eq!(
            i18n.tr_("two-args-key", Some(tr_args!["one"=>1.1, "two"=>"2"])),
            "two args: 1.1 and 2"
        );

        assert_eq!(
            i18n.tr_("plural", Some(tr_args!["hats"=>1.0])),
            "You have 1 hat."
        );
        assert_eq!(
            i18n.tr_("plural", Some(tr_args!["hats"=>1.1])),
            "You have 1.1 hats."
        );
        assert_eq!(
            i18n.tr_("plural", Some(tr_args!["hats"=>3])),
            "You have 3 hats."
        );

        // Another language
        let i18n = I18n::new(&["ja_JP"]);
        assert_eq!(i18n.tr_("valid-key", None), "キー");
        assert_eq!(i18n.tr_("only-in-english", None), "not translated");
        assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");

        assert_eq!(
            i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>"2"])),
            "1と2"
        );

        // Decimal separator
        let i18n = I18n::new(&["pl-PL"]);
        // Polish will use a comma if the string is translated
        assert_eq!(
            i18n.tr_("one-arg-key", Some(tr_args!["one"=>2.07])),
            "fake Polish 2,07"
        );

        // but if it falls back on English, it will use an English separator
        assert_eq!(
            i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>2.07])),
            "two args: 1 and 2.07"
        );
    }
}
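For callers, tr_args! forwards to fluent's fluent_args!, while tr_strs! stringifies every value first, so any Display type can be passed. A usage sketch (the TR variant name below is hypothetical; real variants are generated from the .ftl keys at build time):

use anki_i18n::{tr_strs, I18n, TR};

fn example(i18n: &I18n) -> String {
    // values are converted with to_string() before being added as FluentArgs
    let args = tr_strs!["one" => 3, "two" => "cards"];
    i18n.trn(TR::TwoArgsKey, args) // hypothetical variant for "two-args-key"
}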
@@ -12,18 +12,14 @@ pub(super) use pb::i18n_service::Service as I18nService;

 impl I18nService for Backend {
     fn translate_string(&self, input: pb::TranslateStringIn) -> Result<pb::String> {
-        let key = match crate::fluent_proto::FluentString::from_i32(input.key) {
-            Some(key) => key,
-            None => return Ok("invalid key".to_string().into()),
-        };
+        let key = input.key;

         let map = input
             .args
             .iter()
             .map(|(k, v)| (k.as_str(), translate_arg_to_fluent_val(&v)))
             .collect();

-        Ok(self.i18n.trn(key, map).into())
+        Ok(self.i18n.trn2(key as usize, map).into())
     }

     fn format_timespan(&self, input: pb::FormatTimespanIn) -> Result<pb::String> {
@@ -48,7 +48,6 @@ use crate::{
     collection::Collection,
     err::{AnkiError, Result},
     i18n::I18n,
-    log,
 };
 use once_cell::sync::OnceCell;
 use progress::AbortHandleSlot;
@@ -82,11 +81,7 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
         Err(_) => return Err("couldn't decode init request".into()),
     };

-    let i18n = I18n::new(
-        &input.preferred_langs,
-        input.locale_folder_path,
-        log::terminal(),
-    );
+    let i18n = I18n::new(&input.preferred_langs);

     Ok(Backend::new(i18n, input.server))
 }
@@ -55,7 +55,7 @@ pub fn open_test_collection() -> Collection {
 #[cfg(test)]
 pub fn open_test_collection_with_server(server: bool) -> Collection {
     use crate::log;
-    let i18n = I18n::new(&[""], "", log::terminal());
+    let i18n = I18n::template_only();
     open_collection(":memory:", "", "", server, i18n, log::terminal()).unwrap()
 }

@@ -1,4 +0,0 @@
-// Copyright: Ankitects Pty Ltd and contributors
-// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-include!(concat!(env!("OUT_DIR"), "/fluent_proto.rs"));
@@ -1,583 +1,4 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::err::Result;
+pub use anki_i18n::{tr_args, tr_strs, I18n, TR};
-use crate::log::{error, Logger};
-use fluent::{concurrent::FluentBundle, FluentArgs, FluentResource, FluentValue};
-use num_format::Locale;
-use serde::Serialize;
-use std::borrow::Cow;
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::sync::{Arc, Mutex};
-use unic_langid::LanguageIdentifier;
-
-include!(concat!(env!("OUT_DIR"), "/fluent_keys.rs"));
-
-pub use crate::fluent_proto::FluentString as TR;
-pub use fluent::fluent_args as tr_args;
-
-/// Helper for creating args with &strs
-#[macro_export]
-macro_rules! tr_strs {
-    ( $($key:expr => $value:expr),* ) => {
-        {
-            let mut args: fluent::FluentArgs = fluent::FluentArgs::new();
-            $(
-                args.add($key, $value.to_string().into());
-            )*
-            args
-        }
-    };
-}
-pub use tr_strs;
-
-/// The folder containing ftl files for the provided language.
-/// If a fully qualified folder exists (eg, en_GB), return that.
-/// Otherwise, try the language alone (eg en).
-/// If neither folder exists, return None.
-fn lang_folder(lang: &Option<LanguageIdentifier>, ftl_root_folder: &Path) -> Option<PathBuf> {
-    if let Some(lang) = lang {
-        if let Some(region) = lang.region {
-            let path = ftl_root_folder.join(format!("{}_{}", lang.language, region));
-            if fs::metadata(&path).is_ok() {
-                return Some(path);
-            }
-        }
-        let path = ftl_root_folder.join(lang.language.to_string());
-        if fs::metadata(&path).is_ok() {
-            Some(path)
-        } else {
-            None
-        }
-    } else {
-        // fallback folder
-        let path = ftl_root_folder.join("templates");
-        if fs::metadata(&path).is_ok() {
-            Some(path)
-        } else {
-            None
-        }
-    }
-}
-
-#[cfg(feature = "translations")]
-macro_rules! ftl_path {
-    ( $fname: expr ) => {
-        include_str!(concat!(env!("OUT_DIR"), "/", $fname))
-    };
-}
-
-#[cfg(not(feature = "translations"))]
-macro_rules! ftl_path {
-    ( "template.ftl" ) => {
-        include_str!(concat!(env!("OUT_DIR"), "/template.ftl"))
-    };
-    ( $fname: expr ) => {
-        "" // translations not included
-    };
-}
-
-/// Get the template/English resource text.
-fn ftl_template_text() -> &'static str {
-    ftl_path!("template.ftl")
-}
-
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<&'static str> {
-    let region = match &lang.region {
-        Some(region) => Some(region.as_str()),
-        None => None,
-    };
-    Some(match lang.language.as_str() {
-        "en" => {
-            match region {
-                Some("GB") | Some("AU") => ftl_path!("en-GB.ftl"),
-                // use fallback language instead
-                _ => return None,
-            }
-        }
-        "zh" => match region {
-            Some("TW") | Some("HK") => ftl_path!("zh-TW.ftl"),
-            _ => ftl_path!("zh-CN.ftl"),
-        },
-        "pt" => {
-            if let Some("PT") = region {
-                ftl_path!("pt-PT.ftl")
-            } else {
-                ftl_path!("pt-BR.ftl")
-            }
-        }
-        "ga" => ftl_path!("ga-IE.ftl"),
-        "hy" => ftl_path!("hy-AM.ftl"),
-        "nb" => ftl_path!("nb-NO.ftl"),
-        "sv" => ftl_path!("sv-SE.ftl"),
-        "jbo" => ftl_path!("jbo.ftl"),
-        "kab" => ftl_path!("kab.ftl"),
-        "af" => ftl_path!("af.ftl"),
-        "ar" => ftl_path!("ar.ftl"),
-        "bg" => ftl_path!("bg.ftl"),
-        "ca" => ftl_path!("ca.ftl"),
-        "cs" => ftl_path!("cs.ftl"),
-        "da" => ftl_path!("da.ftl"),
-        "de" => ftl_path!("de.ftl"),
-        "el" => ftl_path!("el.ftl"),
-        "eo" => ftl_path!("eo.ftl"),
-        "es" => ftl_path!("es.ftl"),
-        "et" => ftl_path!("et.ftl"),
-        "eu" => ftl_path!("eu.ftl"),
-        "fa" => ftl_path!("fa.ftl"),
-        "fi" => ftl_path!("fi.ftl"),
-        "fr" => ftl_path!("fr.ftl"),
-        "gl" => ftl_path!("gl.ftl"),
-        "he" => ftl_path!("he.ftl"),
-        "hr" => ftl_path!("hr.ftl"),
-        "hu" => ftl_path!("hu.ftl"),
-        "it" => ftl_path!("it.ftl"),
-        "ja" => ftl_path!("ja.ftl"),
-        "ko" => ftl_path!("ko.ftl"),
-        "la" => ftl_path!("la.ftl"),
-        "mn" => ftl_path!("mn.ftl"),
-        "mr" => ftl_path!("mr.ftl"),
-        "ms" => ftl_path!("ms.ftl"),
-        "nl" => ftl_path!("nl.ftl"),
-        "oc" => ftl_path!("oc.ftl"),
-        "pl" => ftl_path!("pl.ftl"),
-        "ro" => ftl_path!("ro.ftl"),
-        "ru" => ftl_path!("ru.ftl"),
-        "sk" => ftl_path!("sk.ftl"),
-        "sl" => ftl_path!("sl.ftl"),
-        "sr" => ftl_path!("sr.ftl"),
-        "th" => ftl_path!("th.ftl"),
-        "tr" => ftl_path!("tr.ftl"),
-        "uk" => ftl_path!("uk.ftl"),
-        "vi" => ftl_path!("vi.ftl"),
-        _ => return None,
-    })
-}
-
-/// Return the text from any .ftl files in the given folder.
-fn ftl_external_text(folder: &Path) -> Result<String> {
-    let mut buf = String::new();
-    for entry in fs::read_dir(folder)? {
-        let entry = entry?;
-        let fname = entry
-            .file_name()
-            .into_string()
-            .unwrap_or_else(|_| "".into());
-        if !fname.ends_with(".ftl") {
-            continue;
-        }
-        buf += &fs::read_to_string(entry.path())?
-    }
-
-    Ok(buf)
-}
-
-/// Some sample text for testing purposes.
-fn test_en_text() -> &'static str {
-    "
-valid-key = a valid key
-only-in-english = not translated
-two-args-key = two args: {$one} and {$two}
-plural = You have {$hats ->
-    [one] 1 hat
-   *[other] {$hats} hats
-}.
-"
-}
-
-fn test_jp_text() -> &'static str {
-    "
-valid-key = キー
-two-args-key = {$one}と{$two}
-"
-}
-
-fn test_pl_text() -> &'static str {
-    "
-one-arg-key = fake Polish {$one}
-"
-}
-
-/// Parse resource text into an AST for inclusion in a bundle.
-/// Returns None if text contains errors.
-/// extra_text may contain resources loaded from the filesystem
-/// at runtime. If it contains errors, they will not prevent a
-/// bundle from being returned.
-fn get_bundle(
-    text: &str,
-    extra_text: String,
-    locales: &[LanguageIdentifier],
-    log: &Logger,
-) -> Option<FluentBundle<FluentResource>> {
-    let res = FluentResource::try_new(text.into())
-        .map_err(|e| {
-            error!(log, "Unable to parse translations file: {:?}", e);
-        })
-        .ok()?;
-
-    let mut bundle: FluentBundle<FluentResource> = FluentBundle::new(locales);
-    bundle
-        .add_resource(res)
-        .map_err(|e| {
-            error!(log, "Duplicate key detected in translation file: {:?}", e);
-        })
-        .ok()?;
-
-    if !extra_text.is_empty() {
-        match FluentResource::try_new(extra_text) {
-            Ok(res) => bundle.add_resource_overriding(res),
-            Err((_res, e)) => error!(log, "Unable to parse translations file: {:?}", e),
-        }
-    }
-
-    // disable isolation characters in test mode
-    if cfg!(test) {
-        bundle.set_use_isolating(false);
-    }
-
-    // add numeric formatter
-    set_bundle_formatter_for_langs(&mut bundle, locales);
-
-    Some(bundle)
-}
-
-/// Get a bundle that includes any filesystem overrides.
-fn get_bundle_with_extra(
-    text: &str,
-    lang: Option<LanguageIdentifier>,
-    ftl_root_folder: &Path,
-    log: &Logger,
-) -> Option<FluentBundle<FluentResource>> {
-    let mut extra_text = if let Some(path) = lang_folder(&lang, &ftl_root_folder) {
-        match ftl_external_text(&path) {
-            Ok(text) => text,
-            Err(e) => {
-                error!(log, "Error reading external FTL files: {:?}", e);
-                "".into()
-            }
-        }
-    } else {
-        "".into()
-    };
-
-    if cfg!(test) {
-        // inject some test strings in test mode
-        match &lang {
-            None => {
-                extra_text += test_en_text();
-            }
-            Some(lang) if lang.language == "ja" => {
-                extra_text += test_jp_text();
-            }
-            Some(lang) if lang.language == "pl" => {
-                extra_text += test_pl_text();
-            }
-            _ => {}
-        }
-    }
-
-    let mut locales = if let Some(lang) = lang {
-        vec![lang]
-    } else {
-        vec![]
-    };
-    locales.push("en-US".parse().unwrap());
-
-    get_bundle(text, extra_text, &locales, log)
-}
-
-#[derive(Clone)]
-pub struct I18n {
-    inner: Arc<Mutex<I18nInner>>,
-    log: Logger,
-}
-
-impl I18n {
-    pub fn new<S: AsRef<str>, P: Into<PathBuf>>(
-        locale_codes: &[S],
-        ftl_root_folder: P,
-        log: Logger,
-    ) -> Self {
-        let ftl_root_folder = ftl_root_folder.into();
-        let mut input_langs = vec![];
-        let mut bundles = Vec::with_capacity(locale_codes.len() + 1);
-        let mut resource_text = vec![];
-
-        for code in locale_codes {
-            let code = code.as_ref();
-            if let Ok(lang) = code.parse::<LanguageIdentifier>() {
-                input_langs.push(lang.clone());
-                if lang.language == "en" {
-                    // if English was listed, any further preferences are skipped,
-                    // as the template has 100% coverage, and we need to ensure
-                    // it is tried prior to any other langs.
-                    break;
-                }
-            }
-        }
-
-        let mut output_langs = vec![];
-        for lang in input_langs {
-            // if the language is bundled in the binary
-            if let Some(text) = ftl_localized_text(&lang) {
-                if let Some(bundle) =
-                    get_bundle_with_extra(text, Some(lang.clone()), &ftl_root_folder, &log)
-                {
-                    resource_text.push(text);
-                    bundles.push(bundle);
-                    output_langs.push(lang);
-                } else {
-                    error!(log, "Failed to create bundle for {:?}", lang.language)
-                }
-            }
-        }
-
-        // add English templates
-        let template_text = ftl_template_text();
-        let template_lang = "en-US".parse().unwrap();
-        let template_bundle =
-            get_bundle_with_extra(template_text, None, &ftl_root_folder, &log).unwrap();
-        resource_text.push(template_text);
-        bundles.push(template_bundle);
-        output_langs.push(template_lang);
-
-        Self {
-            inner: Arc::new(Mutex::new(I18nInner {
-                bundles,
-                langs: output_langs,
-                resource_text,
-            })),
-            log,
-        }
-    }
-
-    /// Get translation with zero arguments.
-    pub fn tr(&self, key: TR) -> Cow<str> {
-        let key = FLUENT_KEYS[key as usize];
-        self.tr_(key, None)
-    }
-
-    /// Get translation with one or more arguments.
-    pub fn trn(&self, key: TR, args: FluentArgs) -> String {
-        let key = FLUENT_KEYS[key as usize];
-        self.tr_(key, Some(args)).into()
-    }
-
-    fn tr_<'a>(&'a self, key: &str, args: Option<FluentArgs>) -> Cow<'a, str> {
-        for bundle in &self.inner.lock().unwrap().bundles {
-            let msg = match bundle.get_message(key) {
-                Some(msg) => msg,
-                // not translated in this bundle
-                None => continue,
-            };
-
-            let pat = match msg.value {
-                Some(val) => val,
-                // empty value
-                None => continue,
-            };
-
-            let mut errs = vec![];
-            let out = bundle.format_pattern(pat, args.as_ref(), &mut errs);
-            if !errs.is_empty() {
-                error!(self.log, "Error(s) in translation '{}': {:?}", key, errs);
-            }
-            // clone so we can discard args
-            return out.to_string().into();
-        }
-
-        // return the key name if it was missing
-        key.to_string().into()
-    }
-
-    /// Return text from configured locales for use with the JS Fluent implementation.
-    pub fn resources_for_js(&self) -> ResourcesForJavascript {
-        let inner = self.inner.lock().unwrap();
-        ResourcesForJavascript {
-            langs: inner.langs.iter().map(ToString::to_string).collect(),
-            resources: inner.resource_text.clone(),
-        }
-    }
-}
-
-struct I18nInner {
-    // bundles in preferred language order, with template English as the
-    // last element
-    bundles: Vec<FluentBundle<FluentResource>>,
-    langs: Vec<LanguageIdentifier>,
-    resource_text: Vec<&'static str>,
-}
-
-// Simple number formatting implementation
-
-fn set_bundle_formatter_for_langs<T>(bundle: &mut FluentBundle<T>, langs: &[LanguageIdentifier]) {
-    let formatter = if want_comma_as_decimal_separator(langs) {
-        format_decimal_with_comma
-    } else {
-        format_decimal_with_period
-    };
-
-    bundle.set_formatter(Some(formatter));
-}
-
-fn first_available_num_format_locale(langs: &[LanguageIdentifier]) -> Option<Locale> {
-    for lang in langs {
-        if let Some(locale) = num_format_locale(lang) {
-            return Some(locale);
-        }
-    }
-    None
-}
-
-// try to locate a num_format locale for a given language identifier
-fn num_format_locale(lang: &LanguageIdentifier) -> Option<Locale> {
-    // region provided?
-    if let Some(region) = lang.region {
-        let code = format!("{}_{}", lang.language, region);
-        if let Ok(locale) = Locale::from_name(code) {
-            return Some(locale);
-        }
-    }
-    // try the language alone
-    Locale::from_name(lang.language.as_str()).ok()
-}
-
-fn want_comma_as_decimal_separator(langs: &[LanguageIdentifier]) -> bool {
-    let separator = if let Some(locale) = first_available_num_format_locale(langs) {
-        locale.decimal()
-    } else {
-        "."
-    };
-
-    separator == ","
-}
-
-fn format_decimal_with_comma(
-    val: &fluent::FluentValue,
-    _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
-) -> Option<String> {
-    format_number_values(val, Some(","))
-}
-
-fn format_decimal_with_period(
-    val: &fluent::FluentValue,
-    _intl: &intl_memoizer::concurrent::IntlLangMemoizer,
-) -> Option<String> {
-    format_number_values(val, None)
-}
-
-#[inline]
-fn format_number_values(
-    val: &fluent::FluentValue,
-    alt_separator: Option<&'static str>,
-) -> Option<String> {
-    match val {
-        FluentValue::Number(num) => {
-            // create a string with desired maximum digits
-            let max_frac_digits = 2;
-            let with_max_precision = format!(
-                "{number:.precision$}",
-                number = num.value,
-                precision = max_frac_digits
-            );
-
-            // remove any excess trailing zeros
-            let mut val: Cow<str> = with_max_precision.trim_end_matches('0').into();
-
-            // adding back any required to meet minimum_fraction_digits
-            if let Some(minfd) = num.options.minimum_fraction_digits {
-                let pos = val.find('.').expect("expected . in formatted string");
-                let frac_num = val.len() - pos - 1;
-                let zeros_needed = minfd - frac_num;
-                if zeros_needed > 0 {
-                    val = format!("{}{}", val, "0".repeat(zeros_needed)).into();
-                }
-            }
-
-            // lop off any trailing '.'
-            let result = val.trim_end_matches('.');
-
-            if let Some(sep) = alt_separator {
-                Some(result.replace('.', sep))
-            } else {
-                Some(result.to_string())
-            }
-        }
-        _ => None,
-    }
-}
-
-#[derive(Serialize)]
-pub struct ResourcesForJavascript {
-    langs: Vec<String>,
-    resources: Vec<&'static str>,
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-    use crate::log;
-    use std::path::PathBuf;
-    use unic_langid::langid;
-
-    #[test]
-    fn numbers() {
-        assert_eq!(want_comma_as_decimal_separator(&[langid!("en-US")]), false);
-        assert_eq!(want_comma_as_decimal_separator(&[langid!("pl-PL")]), true);
-    }
-
-    #[test]
-    fn i18n() {
-        let ftl_dir = PathBuf::from(std::env::var("TEST_SRCDIR").unwrap());
-        let log = log::terminal();
-
-        // English template
-        let i18n = I18n::new(&["zz"], &ftl_dir, log.clone());
-        assert_eq!(i18n.tr_("valid-key", None), "a valid key");
-        assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
-
-        assert_eq!(
-            i18n.tr_("two-args-key", Some(tr_args!["one"=>1.1, "two"=>"2"])),
-            "two args: 1.1 and 2"
-        );
-
-        assert_eq!(
-            i18n.tr_("plural", Some(tr_args!["hats"=>1.0])),
-            "You have 1 hat."
-        );
-        assert_eq!(
-            i18n.tr_("plural", Some(tr_args!["hats"=>1.1])),
-            "You have 1.1 hats."
-        );
-        assert_eq!(
-            i18n.tr_("plural", Some(tr_args!["hats"=>3])),
-            "You have 3 hats."
-        );
-
-        // Another language
-        let i18n = I18n::new(&["ja_JP"], &ftl_dir, log.clone());
-        assert_eq!(i18n.tr_("valid-key", None), "キー");
-        assert_eq!(i18n.tr_("only-in-english", None), "not translated");
-        assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
-
-        assert_eq!(
-            i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>"2"])),
-            "1と2"
-        );
-
-        // Decimal separator
-        let i18n = I18n::new(&["pl-PL"], &ftl_dir, log.clone());
-        // Polish will use a comma if the string is translated
-        assert_eq!(
-            i18n.tr_("one-arg-key", Some(tr_args!["one"=>2.07])),
-            "fake Polish 2,07"
-        );
-
-        // but if it falls back on English, it will use an English separator
-        assert_eq!(
-            i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>2.07])),
-            "two args: 1 and 2.07"
-        );
-    }
-}
@@ -16,7 +16,6 @@ pub mod deckconf;
 pub mod decks;
 pub mod err;
 pub mod findreplace;
-mod fluent_proto;
 pub mod i18n;
 pub mod latex;
 pub mod log;
@@ -551,7 +551,7 @@ pub(crate) mod test {
        let mgr = MediaManager::new(&media_dir, media_db.clone())?;

        let log = log::terminal();
-        let i18n = I18n::new(&["zz"], "dummy", log.clone());
+        let i18n = I18n::template_only();

        let col = open_collection(col_path, media_dir, media_db, false, i18n, log)?;

@@ -168,13 +168,11 @@ impl Timespan {
 #[cfg(test)]
 mod test {
     use crate::i18n::I18n;
-    use crate::log;
     use crate::scheduler::timespan::{answer_button_time, time_span, MONTH};

     #[test]
     fn answer_buttons() {
-        let log = log::terminal();
-        let i18n = I18n::new(&["zz"], "", log);
+        let i18n = I18n::template_only();
         assert_eq!(answer_button_time(30.0, &i18n), "30s");
         assert_eq!(answer_button_time(70.0, &i18n), "1m");
         assert_eq!(answer_button_time(1.1 * MONTH, &i18n), "1.1mo");

@@ -182,8 +180,7 @@ mod test {

     #[test]
     fn time_spans() {
-        let log = log::terminal();
-        let i18n = I18n::new(&["zz"], "", log);
+        let i18n = I18n::template_only();
         assert_eq!(time_span(1.0, &i18n, false), "1 second");
         assert_eq!(time_span(30.3, &i18n, false), "30 seconds");
         assert_eq!(time_span(30.3, &i18n, true), "30.3 seconds");

@@ -610,7 +610,7 @@ mod test {
         let col_path = dir.path().join("col.anki2");
         fs::write(&col_path, MEDIACHECK_ANKI2).unwrap();

-        let i18n = I18n::new(&[""], "", log::terminal());
+        let i18n = I18n::template_only();
         let mut col = open_collection(
             &col_path,
             &PathBuf::new(),

@@ -29,13 +29,11 @@ impl Collection {
 mod test {
     use super::studied_today;
     use crate::i18n::I18n;
-    use crate::log;

     #[test]
     fn today() {
         // temporary test of fluent term handling
-        let log = log::terminal();
-        let i18n = I18n::new(&["zz"], "", log);
+        let i18n = I18n::template_only();
         assert_eq!(
             &studied_today(3, 13.0, &i18n).replace("\n", " "),
             "Studied 3 cards in 13 seconds today (4.33s/card)"

@@ -502,12 +502,12 @@ impl super::SqliteStorage {

 #[cfg(test)]
 mod test {
-    use crate::{card::Card, i18n::I18n, log, storage::SqliteStorage};
+    use crate::{card::Card, i18n::I18n, storage::SqliteStorage};
     use std::path::Path;

     #[test]
     fn add_card() {
-        let i18n = I18n::new(&[""], "", log::terminal());
+        let i18n = I18n::template_only();
         let storage = SqliteStorage::open_or_create(Path::new(":memory:"), &i18n, false).unwrap();
         let mut card = Card::default();
         storage.add_card(&mut card).unwrap();

@@ -1235,7 +1235,7 @@ mod test {

     fn open_col(dir: &Path, server: bool, fname: &str) -> Result<Collection> {
         let path = dir.join(fname);
-        let i18n = I18n::new(&[""], "", log::terminal());
+        let i18n = I18n::template_only();
         open_collection(path, "".into(), "".into(), server, i18n, log::terminal())
     }

@@ -810,7 +810,6 @@ mod test {
     use crate::err::TemplateError;
     use crate::{
         i18n::I18n,
-        log,
         template::{field_is_empty, nonempty_fields, FieldRequirements, RenderContext},
     };
     use std::collections::{HashMap, HashSet};

@@ -1128,7 +1127,7 @@ mod test {
             .map(|r| (r.0, r.1.into()))
             .collect();

-        let i18n = I18n::new(&[""], "", log::terminal());
+        let i18n = I18n::template_only();
         use crate::template::RenderedNode as FN;

         let qnodes = super::render_card("test{{E}}", "", &map, 1, false, &i18n)

@@ -49,7 +49,6 @@ esbuild(
         "index",
         "//ts/lib",
         "//ts/lib:backend_proto",
-        "//ts/lib:fluent_proto",
         "//ts/sass:core_css",
     ],
 )

@@ -33,9 +33,9 @@ ts_library(
         exclude = ["index.ts"],
     ),
     deps = [
-        "//ts/sveltelib",
         "//ts/lib",
         "//ts/lib:backend_proto",
+        "//ts/sveltelib",
         "@npm//@types/d3",
         "@npm//@types/lodash",
         "@npm//d3",

@@ -64,7 +64,6 @@ esbuild(
         "//ts/sveltelib",
         "//ts/lib",
         "//ts/lib:backend_proto",
-        "//ts/lib:fluent_proto",
         ":index",
         "//ts/sass:core_css",
     ] + svelte_names,

@@ -6,24 +6,45 @@ load("//ts:protobuf.bzl", "protobufjs_library")
 # Protobuf
 #############

-protobufjs_library(
-    name = "fluent_proto",
-    proto = "//rslib:fluent_proto_lib",
-    visibility = ["//visibility:public"],
-)
-
 protobufjs_library(
     name = "backend_proto",
     proto = "//rslib:backend_proto_lib",
     visibility = ["//visibility:public"],
 )

+# Translations
+################
+
+load("@rules_python//python:defs.bzl", "py_binary")
+load("@py_deps//:requirements.bzl", "requirement")
+
+py_binary(
+    name = "genfluent",
+    srcs = [
+        "genfluent.py",
+    ],
+    deps = [
+        requirement("black"),
+        requirement("stringcase"),
+    ],
+)
+
+genrule(
+    name = "fluent_gen",
+    outs = ["i18n_generated.ts"],
+    cmd = "$(location genfluent) $(location //rslib/i18n:strings.json) $@",
+    tools = [
+        "genfluent",
+        "//rslib/i18n:strings.json",
+    ],
+)
+
 # Anki Library
 ################

 ts_library(
     name = "lib",
-    srcs = glob(["**/*.ts"]),
+    srcs = glob(["**/*.ts"]) + [":i18n_generated.ts"],
     data = [
         "backend_proto",
     ],

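The new genrule runs genfluent.py (added below) over the strings.json manifest produced by the new rslib/i18n crate, writing i18n_generated.ts into ts/lib's sources. The JSON itself never appears in this diff; the following TypeScript sketch is a guess at its shape, inferred purely from the fields the script reads, with the module and key names invented for illustration.

    // Hypothetical shape of //rslib/i18n:strings.json, inferred from the
    // fields genfluent.py accesses; names and values are invented examples.
    interface Translation {
        key: string;         // kebab-case Fluent key, e.g. "again-button"
        index: number;       // position of the translation within its module
        text: string;        // English text, used for generated docstrings
        variables: string[]; // Fluent variables the message expects
    }

    interface Module {
        index: number;       // per-module index; combined with the translation
                             // index to form legacy enum values (see below)
        translations: Translation[];
    }

    const modules: Module[] = [
        {
            index: 2,
            translations: [
                { key: "again-button", index: 0, text: "Again", variables: [] },
            ],
        },
    ];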
@@ -32,7 +53,6 @@ ts_library(
     visibility = ["//visibility:public"],
     deps = [
         "backend_proto",
-        "fluent_proto",
         "@npm//@fluent/bundle",
         "@npm//@types/long",
         "@npm//intl-pluralrules",

71
ts/lib/genfluent.py
Normal file

@@ -0,0 +1,71 @@
+# Copyright: Ankitects Pty Ltd and contributors
+# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+import json
+import sys
+from typing import List
+
+import stringcase
+
+strings_json, outfile = sys.argv[1:]
+modules = json.load(open(strings_json))
+
+
+def legacy_enum() -> str:
+    out = ["export enum LegacyEnum {"]
+    for module in modules:
+        for translation in module["translations"]:
+            key = stringcase.constcase(translation["key"])
+            value = module["index"] * 1000 + translation["index"]
+            out.append(f"    {key} = {value},")
+
+    out.append("}")
+    return "\n".join(out) + "\n"
+
+
+def methods() -> str:
+    out = [
+        "class AnkiTranslations:",
+        "    def _translate(self, module: int, translation: int, args: Dict) -> str:",
+        "        raise Exception('not implemented')",
+    ]
+    for module in modules:
+        for translation in module["translations"]:
+            key = translation["key"].replace("-", "_")
+            arg_types = get_arg_types(translation["variables"])
+            args = get_args(translation["variables"])
+            doc = translation["text"]
+            out.append(
+                f"""
+    def {key}(self, {arg_types}) -> str:
+        r''' {doc} '''
+        return self._translate({module["index"]}, {translation["index"]}, {{{args}}})
+"""
+            )
+
+    return "\n".join(out) + "\n"
+
+
+def get_arg_types(args: List[str]) -> str:
+    return ", ".join([f"{stringcase.snakecase(arg)}: FluentVariable" for arg in args])
+
+
+def get_args(args: List[str]) -> str:
+    return ", ".join([f'"{arg}": {stringcase.snakecase(arg)}' for arg in args])
+
+
+out = ""

+out += legacy_enum()
+# out += methods()
+
+
+open(outfile, "wb").write(
+    (
+        """// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+"""
+        + out
+    ).encode("utf8")
+)

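Given a module like the invented example sketched earlier (module index 2, key "again-button" at index 0), legacy_enum() would emit TypeScript along these lines, packing the module and translation indices into a single value as module * 1000 + index, with stringcase.constcase turning the kebab-case key into a constant name. methods(), which would generate one documented method per translation, is deliberately left commented out: the codebase still consumes the legacy enum for now.

    // Hypothetical generator output for the invented example module:
    export enum LegacyEnum {
        AGAIN_BUTTON = 2000, // module 2 * 1000 + translation index 0
    }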
@@ -1,9 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-import pb from "./fluent_proto";
 import "intl-pluralrules";
 import { FluentBundle, FluentResource, FluentNumber } from "@fluent/bundle/compat";
+import { LegacyEnum } from "./i18n_generated";

 type RecordVal = number | string | FluentNumber;

@@ -23,9 +23,9 @@ function formatNumbers(args?: Record<string, RecordVal>): void {
 export class I18n {
     bundles: FluentBundle[] = [];
     langs: string[] = [];
-    TR = pb.FluentProto.FluentString;
+    TR = LegacyEnum;

-    tr(id: pb.FluentProto.FluentString, args?: Record<string, RecordVal>): string {
+    tr(id: LegacyEnum, args?: Record<string, RecordVal>): string {
         formatNumbers(args);
         const key = this.keyName(id);
         for (const bundle of this.bundles) {

@@ -66,7 +66,7 @@ export class I18n {
         });
     }

-    private keyName(msg: pb.FluentProto.FluentString): string {
+    private keyName(msg: LegacyEnum): string {
         return this.TR[msg].toLowerCase().replace(/_/g, "-");
     }
 }

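The wrapper class keeps its old surface, but ids are now plain members of the generated numeric enum. keyName leans on the reverse mapping that TypeScript numeric enums provide (LegacyEnum[2000] === "AGAIN_BUTTON") to recover the kebab-case Fluent key before looking it up in each bundle. A hedged usage sketch, reusing the invented AGAIN_BUTTON member from the example above:

    // Sketch only; AGAIN_BUTTON is the invented example member from above,
    // and bundle setup is elided.
    const i18n = new I18n();
    const label = i18n.tr(i18n.TR.AGAIN_BUTTON);
    // Internally: keyName(2000) -> "AGAIN_BUTTON" -> "again-button",
    // which is then resolved against each FluentBundle in turn.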