rework Fluent handling

- all .ftl files for a language are concatenated into a single file
at build time
- all languages are included in the binary
- external ftl files placed in the ftl folder can override the
built-in definitions
- constants are automatically generated for each string key
- dropped the separate StringsGroup enum
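A minimal sketch of what the new calling convention looks like at a Python call site, using names that appear in the diff below (FString from anki.rsbackend, tr from aqt.utils); the old StringsGroup form is shown for contrast:

    # before: a group enum plus a free-form string key
    # label = tr(StringsGroup.STATISTICS, "due-date")

    # after: one generated constant per Fluent key, so a bad key fails at
    # attribute lookup instead of silently falling back at runtime
    from anki.rsbackend import FString
    from aqt.utils import tr

    label = tr(FString.STATISTICS_DUE_DATE)
    count_msg = tr(FString.STATISTICS_REVIEWS, reviews=3)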
Damien Elmes 2020-02-23 12:21:12 +10:00
parent 05d7852910
commit 4430c67069
44 changed files with 576 additions and 351 deletions


@@ -21,6 +21,7 @@ To start, make sure you have the following installed:
- gettext
- rename
- rsync
+- perl

The build scripts assume a UNIX-like environment, so on Windows you will
need to use WSL or Cygwin to use them.

proto/.gitignore (new file)

@@ -0,0 +1 @@
+fluent.proto


@@ -1,5 +1,7 @@
syntax = "proto3";
+import "fluent.proto";
package backend_proto;
message Empty {}
@@ -12,19 +14,6 @@ message BackendInit {
    string locale_folder_path = 5;
}
-enum StringsGroup {
-    OTHER = 0;
-    TEST = 1;
-    MEDIA_CHECK = 2;
-    CARD_TEMPLATES = 3;
-    SYNC = 4;
-    NETWORK = 5;
-    STATISTICS = 6;
-    FILTERING = 7;
-    SCHEDULING = 8;
-    DECK_CONFIG = 9;
-}
// 1-15 reserved for future use; 2047 for errors
message BackendInput {
@@ -299,8 +288,7 @@ message TrashMediaFilesIn {
}
message TranslateStringIn {
-    StringsGroup group = 1;
-    string key = 2;
+    FluentString key = 2;
    map<string,TranslateArgValue> args = 3;
}

pylib/.gitignore

@@ -11,6 +11,7 @@
__pycache__
anki.egg-info
anki/backend_pb2.*
+anki/fluent_pb2.*
anki/buildhash.py
build
dist


@@ -1,5 +1,5 @@
[settings]
-skip=aqt/forms,anki/backend_pb2.py,backend_pb2.pyi
+skip=aqt/forms,backend_pb2.py,backend_pb2.pyi,fluent_pb2.py,fluent_pb2.pyi
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0


@@ -1,3 +1,6 @@
+[MASTER]
+ignore-patterns=.*_pb2.*
[MESSAGES CONTROL]
disable=C,R,
    fixme,


@@ -5,7 +5,7 @@ MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
RUNARGS :=
.SUFFIXES:
-BLACKARGS := -t py36 anki tests setup.py tools/*.py --exclude='backend_pb2|buildinfo'
+BLACKARGS := -t py36 anki tests setup.py tools/*.py --exclude='_pb2|buildinfo'
ISORTARGS := anki tests setup.py

$(shell mkdir -p .build ../dist)
@@ -25,6 +25,9 @@ PROTODEPS := $(wildcard ../proto/*.proto)
.build/py-proto: .build/dev-deps $(PROTODEPS)
	protoc --proto_path=../proto --python_out=anki --mypy_out=anki $(PROTODEPS)
+	# fixup import path
+	perl -i'' -pe 's/from fluent_pb2/from anki.fluent_pb2/' anki/backend_pb2.pyi
+	perl -i'' -pe 's/import fluent_pb2/import anki.fluent_pb2/' anki/backend_pb2.py
	@touch $@

.build/hooks: tools/genhooks.py tools/hookslib.py


@@ -12,6 +12,7 @@ import ankirspy  # pytype: disable=import-error
import anki.backend_pb2 as pb
import anki.buildinfo
from anki import hooks
+from anki.fluent_pb2 import FluentString as FString
from anki.models import AllTemplateReqs
from anki.sound import AVTag, SoundOrVideoTag, TTSTag
from anki.types import assert_impossible_literal
@@ -132,8 +133,6 @@ MediaSyncProgress = pb.MediaSyncProgress
MediaCheckOutput = pb.MediaCheckOut
-StringsGroup = pb.StringsGroup
FormatTimeSpanContext = pb.FormatTimeSpanIn.Context
@@ -329,9 +328,7 @@ class RustBackend:
            pb.BackendInput(trash_media_files=pb.TrashMediaFilesIn(fnames=fnames))
        )

-    def translate(
-        self, group: pb.StringsGroup, key: str, **kwargs: Union[str, int, float]
-    ):
+    def translate(self, key: FString, **kwargs: Union[str, int, float]):
        args = {}
        for (k, v) in kwargs.items():
            if isinstance(v, str):
@@ -340,9 +337,7 @@ class RustBackend:
                args[k] = pb.TranslateArgValue(number=v)
        return self._run_command(
-            pb.BackendInput(
-                translate_string=pb.TranslateStringIn(group=group, key=key, args=args)
-            )
+            pb.BackendInput(translate_string=pb.TranslateStringIn(key=key, args=args))
        ).translate_string

    def format_time_span(


@@ -11,7 +11,7 @@ from typing import Any, Dict, List, Optional, Tuple
import anki
from anki.consts import *
from anki.lang import _, ngettext
-from anki.rsbackend import StringsGroup
+from anki.rsbackend import FString
from anki.utils import fmtTimeSpan, ids2str

# Card stats
@@ -48,8 +48,7 @@ class CardStats:
            next = self.date(next)
            if next:
                self.addLine(
-                    self.col.backend.translate(StringsGroup.STATISTICS, "due-date"),
-                    next,
+                    self.col.backend.translate(FString.STATISTICS_DUE_DATE), next,
                )
        if c.queue == QUEUE_TYPE_REV:
            self.addLine(
@@ -279,7 +278,7 @@ from revlog where id > ? """
        self._line(
            i,
            _("Total"),
-            self.col.backend.translate(StringsGroup.STATISTICS, "reviews", reviews=tot),
+            self.col.backend.translate(FString.STATISTICS_REVIEWS, reviews=tot),
        )
        self._line(i, _("Average"), self._avgDay(tot, num, _("reviews")))
        tomorrow = self.col.db.scalar(
@@ -457,8 +456,7 @@ group by day order by day"""
            i,
            _("Average answer time"),
            self.col.backend.translate(
-                StringsGroup.STATISTICS,
-                "average-answer-time",
+                FString.STATISTICS_AVERAGE_ANSWER_TIME,
                **{"cards-per-minute": perMin, "average-seconds": average_secs},
            ),
        )


@@ -5,7 +5,7 @@ import tempfile
from anki import Collection as aopen
from anki.lang import without_unicode_isolation
-from anki.rsbackend import StringsGroup
+from anki.rsbackend import FString
from anki.stdmodels import addBasicModel, models
from anki.utils import isWin
from tests.shared import assertException, getEmptyCol
@@ -156,7 +156,9 @@ def test_translate():
    tr = d.backend.translate
    no_uni = without_unicode_isolation

-    assert tr(StringsGroup.TEST, "valid-key") == "a valid key"
-    assert "invalid-key" in tr(StringsGroup.TEST, "invalid-key")
-    assert no_uni(tr(StringsGroup.TEST, "plural", hats=1)) == "You have 1 hat."
-    assert no_uni(tr(StringsGroup.TEST, "plural", hats=2)) == "You have 2 hats."
+    assert (
+        tr(FString.CARD_TEMPLATE_RENDERING_FRONT_SIDE_PROBLEM)
+        == "Front template has a problem:"
+    )
+    assert no_uni(tr(FString.STATISTICS_REVIEWS, reviews=1)) == "1 review"
+    assert no_uni(tr(FString.STATISTICS_REVIEWS, reviews=2)) == "2 reviews"


@@ -25,8 +25,8 @@ all: check
	./tools/build_ui.sh
	@touch $@

-.build/i18n: $(wildcard i18n/po/desktop/*/anki.po) $(wildcard i18n/ftl/core/*/*.ftl)
-	(cd i18n && ./pull-git && ./build-mo-files && ./copy-qt-files && ./copy-ftl-files)
+.build/i18n: $(wildcard i18n/po/desktop/*/anki.po)
+	(cd i18n && ./pull-git && ./build-mo-files && ./copy-qt-files)
	@touch $@

TSDEPS := $(wildcard ts/src/*.ts) $(wildcard ts/scss/*.scss)


@@ -24,7 +24,7 @@ from anki.consts import *
from anki.lang import _, ngettext
from anki.models import NoteType
from anki.notes import Note
-from anki.rsbackend import StringsGroup
+from anki.rsbackend import FString
from anki.utils import fmtTimeSpan, htmlToTextLine, ids2str, intTime, isMac, isWin
from aqt import AnkiQt, gui_hooks
from aqt.editor import Editor
@@ -356,7 +356,7 @@ class DataModel(QAbstractTableModel):
        elif c.queue == QUEUE_TYPE_LRN:
            date = c.due
        elif c.queue == QUEUE_TYPE_NEW or c.type == CARD_TYPE_NEW:
-            return tr(StringsGroup.STATISTICS, "due-for-new-card", number=c.due)
+            return tr(FString.STATISTICS_DUE_FOR_NEW_CARD, number=c.due)
        elif c.queue in (QUEUE_TYPE_REV, QUEUE_TYPE_DAY_LEARN_RELEARN) or (
            c.type == CARD_TYPE_REV and c.queue < 0
        ):
@@ -730,7 +730,7 @@ class Browser(QMainWindow):
            ("noteCrt", _("Created")),
            ("noteMod", _("Edited")),
            ("cardMod", _("Changed")),
-            ("cardDue", tr(StringsGroup.STATISTICS, "due-date")),
+            ("cardDue", tr(FString.STATISTICS_DUE_DATE)),
            ("cardIvl", _("Interval")),
            ("cardEase", _("Ease")),
            ("cardReps", _("Reviews")),
@@ -1272,7 +1272,7 @@ by clicking on one on the left."""
            (_("New"), "is:new"),
            (_("Learning"), "is:learn"),
            (_("Review"), "is:review"),
-            (tr(StringsGroup.FILTERING, "is-due"), "is:due"),
+            (tr(FString.FILTERING_IS_DUE), "is:due"),
            None,
            (_("Suspended"), "is:suspended"),
            (_("Buried"), "is:buried"),


@@ -11,7 +11,7 @@ from typing import Any
import aqt
from anki.errors import DeckRenameError
from anki.lang import _, ngettext
-from anki.rsbackend import StringsGroup
+from anki.rsbackend import FString
from anki.utils import ids2str
from aqt import AnkiQt, gui_hooks
from aqt.qt import *
@@ -185,7 +185,7 @@ where id > ?""",
<tr><th colspan=5 align=left>%s</th><th class=count>%s</th>
<th class=count>%s</th><th class=optscol></th></tr>""" % (
            _("Deck"),
-            tr(StringsGroup.STATISTICS, "due-count"),
+            tr(FString.STATISTICS_DUE_COUNT),
            _("New"),
        )
        buf += self._topLevelDragRow()


@@ -11,11 +11,11 @@ from typing import Iterable, List, Optional, TypeVar
import aqt
from anki import hooks
from anki.rsbackend import (
+    FString,
    Interrupted,
    MediaCheckOutput,
    Progress,
    ProgressKind,
-    StringsGroup,
)
from aqt.qt import *
from aqt.utils import askUser, restoreGeom, saveGeom, showText, tooltip, tr
@@ -89,14 +89,14 @@ class MediaChecker:
        layout.addWidget(box)

        if output.unused:
-            b = QPushButton(tr(StringsGroup.MEDIA_CHECK, "delete-unused"))
+            b = QPushButton(tr(FString.MEDIA_CHECK_DELETE_UNUSED))
            b.setAutoDefault(False)
            box.addButton(b, QDialogButtonBox.RejectRole)
            b.clicked.connect(lambda c: self._on_trash_files(output.unused))  # type: ignore

        if output.missing:
            if any(map(lambda x: x.startswith("latex-"), output.missing)):
-                b = QPushButton(tr(StringsGroup.MEDIA_CHECK, "render-latex"))
+                b = QPushButton(tr(FString.MEDIA_CHECK_RENDER_LATEX))
                b.setAutoDefault(False)
                box.addButton(b, QDialogButtonBox.RejectRole)
                b.clicked.connect(self._on_render_latex)  # type: ignore
@@ -125,17 +125,17 @@ class MediaChecker:
            browser.onSearchActivated()
            showText(err, type="html")
        else:
-            tooltip(tr(StringsGroup.MEDIA_CHECK, "all-latex-rendered"))
+            tooltip(tr(FString.MEDIA_CHECK_ALL_LATEX_RENDERED))

    def _on_render_latex_progress(self, count: int) -> bool:
        if self.progress_dialog.wantCancel:
            return False

-        self.mw.progress.update(tr(StringsGroup.MEDIA_CHECK, "checked", count=count))
+        self.mw.progress.update(tr(FString.MEDIA_CHECK_CHECKED, count=count))
        return True

    def _on_trash_files(self, fnames: List[str]):
-        if not askUser(tr(StringsGroup.MEDIA_CHECK, "delete-unused-confirm")):
+        if not askUser(tr(FString.MEDIA_CHECK_DELETE_UNUSED_CONFIRM)):
            return

        self.progress_dialog = self.mw.progress.start()
@@ -149,10 +149,10 @@ class MediaChecker:
                remaining -= len(chunk)
                if time.time() - last_progress >= 0.3:
                    self.mw.progress.update(
-                        tr(StringsGroup.MEDIA_CHECK, "files-remaining", count=remaining)
+                        tr(FString.MEDIA_CHECK_FILES_REMAINING, count=remaining)
                    )
        finally:
            self.mw.progress.finish()
            self.progress_dialog = None

-        tooltip(tr(StringsGroup.MEDIA_CHECK, "delete-unused-complete", count=total))
+        tooltip(tr(FString.MEDIA_CHECK_DELETE_UNUSED_COMPLETE, count=total))


@@ -11,12 +11,12 @@ from typing import List, Union
import aqt
from anki import hooks
from anki.rsbackend import (
+    FString,
    Interrupted,
    MediaSyncProgress,
    NetworkError,
    Progress,
    ProgressKind,
-    StringsGroup,
    SyncError,
)
from anki.types import assert_impossible
@@ -65,10 +65,10 @@ class MediaSyncer:
            return

        if not self.mw.pm.media_syncing_enabled():
-            self._log_and_notify(tr(StringsGroup.SYNC, "media-disabled"))
+            self._log_and_notify(tr(FString.SYNC_MEDIA_DISABLED))
            return

-        self._log_and_notify(tr(StringsGroup.SYNC, "media-starting"))
+        self._log_and_notify(tr(FString.SYNC_MEDIA_STARTING))
        self._syncing = True
        self._want_stop = False
        gui_hooks.media_sync_did_start_or_stop(True)
@@ -101,19 +101,19 @@ class MediaSyncer:
        if exc is not None:
            self._handle_sync_error(exc)
        else:
-            self._log_and_notify(tr(StringsGroup.SYNC, "media-complete"))
+            self._log_and_notify(tr(FString.SYNC_MEDIA_COMPLETE))

    def _handle_sync_error(self, exc: BaseException):
        if isinstance(exc, Interrupted):
-            self._log_and_notify(tr(StringsGroup.SYNC, "media-aborted"))
+            self._log_and_notify(tr(FString.SYNC_MEDIA_ABORTED))
            return

-        self._log_and_notify(tr(StringsGroup.SYNC, "media-failed"))
+        self._log_and_notify(tr(FString.SYNC_MEDIA_FAILED))
        if isinstance(exc, SyncError):
            showWarning(exc.localized())
        elif isinstance(exc, NetworkError):
            msg = exc.localized()
-            msg += "\n\n" + tr(StringsGroup.NETWORK, "details", details=str(exc))
+            msg += "\n\n" + tr(FString.NETWORK_DETAILS, details=str(exc))
        else:
            raise exc
@@ -123,7 +123,7 @@ class MediaSyncer:
    def abort(self) -> None:
        if not self.is_syncing():
            return
-        self._log_and_notify(tr(StringsGroup.SYNC, "media-aborting"))
+        self._log_and_notify(tr(FString.SYNC_MEDIA_ABORTING))
        self._want_stop = True

    def is_syncing(self) -> bool:
@@ -166,7 +166,7 @@ class MediaSyncDialog(QDialog):
        self._close_when_done = close_when_done
        self.form = aqt.forms.synclog.Ui_Dialog()
        self.form.setupUi(self)
-        self.abort_button = QPushButton(tr(StringsGroup.SYNC, "abort"))
+        self.abort_button = QPushButton(tr(FString.SYNC_ABORT_BUTTON))
        self.abort_button.clicked.connect(self._on_abort)  # type: ignore
        self.abort_button.setAutoDefault(False)
        self.form.buttonBox.addButton(self.abort_button, QDialogButtonBox.ActionRole)


@@ -12,7 +12,7 @@ from typing import Any, Optional, Union
import aqt
from anki.lang import _
-from anki.rsbackend import StringsGroup
+from anki.rsbackend import FString
from anki.utils import invalidFilename, isMac, isWin, noBundledLibs, versionWithBuild
from aqt.qt import *
from aqt.theme import theme_manager
@@ -31,13 +31,13 @@ def locale_dir() -> str:
    return os.path.join(aqt_data_folder(), "locale")


-def tr(group: StringsGroup, key: str, **kwargs: Union[str, int, float]) -> str:
+def tr(key: FString, **kwargs: Union[str, int, float]) -> str:
    """Shortcut to access translations from the backend.

    (Currently) requires an open collection."""
    if aqt.mw.col:
-        return aqt.mw.col.backend.translate(group, key, **kwargs)
+        return aqt.mw.col.backend.translate(key, **kwargs)
    else:
-        return key
+        return repr(key)


def openHelp(section):


@@ -1,5 +0,0 @@
-#!/bin/bash
-targetDir=../aqt_data/locale/fluent
-test -d $targetDir || mkdir -p $targetDir
-rsync -a --delete --exclude=templates ftl/core/* $targetDir/


@@ -4,13 +4,8 @@ if [ ! -d po ]; then
    git clone https://github.com/ankitects/anki-desktop-i18n po
fi

-if [ ! -d ftl ]; then
-    git clone https://github.com/ankitects/anki-core-i18n ftl
-fi
-
echo "Updating translations from git..."
(cd po && git pull)
-(cd ftl && git pull)

# make sure gettext translations haven't broken something
python check-po-files.py


@@ -1,8 +0,0 @@
-#!/bin/bash
-# pull any pending changes from git repos
-./pull-git
-# upload changes to ftl templates
-./update-ftl-templates
-(cd ftl && git add core; git commit -m update; git push)


@@ -1,3 +0,0 @@
-#!/bin/bash
-rsync -a --delete ../../rslib/src/i18n/*.ftl ftl/core/templates/


@@ -49,4 +49,5 @@ reqwest = { version = "0.10.1", features = ["json"] }

[build-dependencies]
prost-build = "0.5.0"
+fluent-syntax = "0.9.2"


@@ -18,11 +18,12 @@ fix:
clean:
	rm -rf .build target

-develop: .build/vernum
+develop: .build/vernum ftl/repo

-PROTO_SOURCE := $(wildcard ../proto/*.proto)
-RS_SOURCE := $(wildcard src/*)
-ALL_SOURCE := $(RS_SOURCE) $(PROTO_SOURCE)
+ftl/repo:
+	(cd ftl && ./scripts/fetch-latest-translations)
+
+ALL_SOURCE := $(find src -type f) $(wildcard ftl/*.ftl)

# nightly currently required for ignoring files in rustfmt.toml
RUST_TOOLCHAIN := $(shell cat rust-toolchain)


@@ -1,8 +1,152 @@
use prost_build;
+use std::fs;
+use std::path::Path;

-fn main() {
-    // avoid default OUT_DIR for now, for code completion
+use fluent_syntax::ast::{Entry::Message, ResourceEntry};
+use fluent_syntax::parser::parse;
fn get_identifiers(ftl_text: &str) -> Vec<String> {
let res = parse(ftl_text).unwrap();
let mut idents = vec![];
for entry in res.body {
if let ResourceEntry::Entry(Message(m)) = entry {
idents.push(m.id.name.to_string());
}
}
idents
}
fn proto_enum(idents: &[String]) -> String {
let mut buf = String::from(
r#"
syntax = "proto3";
package backend_proto;
enum FluentString {
"#,
);
for (idx, s) in idents.iter().enumerate() {
let name = s.replace("-", "_").to_uppercase();
buf += &format!(" {} = {};\n", name, idx);
}
buf += "}\n";
buf
}
fn rust_string_vec(idents: &[String]) -> String {
let mut buf = String::from(
r#"// This file is automatically generated as part of the build process.
pub(super) const FLUENT_KEYS: &[&str] = &[
"#,
);
for s in idents {
buf += &format!(" \"{}\",\n", s);
}
buf += "];\n";
buf
}
#[cfg(test)]
mod test {
use crate::i18n::extract_idents::{get_identifiers, proto_enum, rust_string_vec};
#[test]
fn all() {
let idents = get_identifiers("key-one = foo\nkey-two = bar");
assert_eq!(idents, vec!["key-one", "key-two"]);
assert_eq!(
proto_enum(&idents),
r#"
syntax = "proto3";
package backend_strings;
enum FluentString {
KEY_ONE = 0;
KEY_TWO = 1;
}
"#
);
assert_eq!(
rust_string_vec(&idents),
r#"// This file is automatically generated as part of the build process.
const FLUENT_KEYS: &[&str] = &[
"key-one",
"key-two",
];
"#
);
}
}
fn main() -> std::io::Result<()> {
// write template.ftl
let mut buf = String::new();
let mut ftl_template_dirs = vec!["./ftl".to_string()];
if let Ok(paths) = std::env::var("FTL_TEMPLATE_DIRS") {
ftl_template_dirs.extend(paths.split(",").map(|s| s.to_string()));
}
for ftl_dir in ftl_template_dirs {
let ftl_dir = Path::new(&ftl_dir);
for entry in fs::read_dir(ftl_dir)? {
let entry = entry?;
let fname = entry.file_name().into_string().unwrap();
if !fname.ends_with(".ftl") {
continue;
}
let path = entry.path();
println!("cargo:rerun-if-changed=./ftl/{}", fname);
buf += &fs::read_to_string(path)?;
}
}
let combined_ftl = Path::new("src/i18n/ftl/template.ftl");
fs::write(combined_ftl, &buf)?;
// generate code completion for ftl strings
let idents = get_identifiers(&buf);
let string_proto_path = Path::new("../proto/fluent.proto");
fs::write(string_proto_path, proto_enum(&idents))?;
let rust_string_path = Path::new("src/i18n/autogen.rs");
fs::write(rust_string_path, rust_string_vec(&idents))?;
// output protobuf generated code
// we avoid default OUT_DIR for now, as it breaks code completion
    std::env::set_var("OUT_DIR", "src");
    println!("cargo:rerun-if-changed=../proto/backend.proto");
    prost_build::compile_protos(&["../proto/backend.proto"], &["../proto"]).unwrap();
// write the other language ftl files
// fixme: doesn't currently support extra dirs
let mut ftl_lang_dirs = vec!["./ftl/repo/core".to_string()];
if let Ok(paths) = std::env::var("FTL_LANG_DIRS") {
ftl_lang_dirs.extend(paths.split(",").map(|s| s.to_string()));
}
for ftl_dir in ftl_lang_dirs {
for ftl_dir in fs::read_dir(ftl_dir)? {
let ftl_dir = ftl_dir?;
if ftl_dir.file_name() == "templates" {
continue;
}
let mut buf = String::new();
let lang = ftl_dir.file_name().into_string().unwrap();
for entry in fs::read_dir(ftl_dir.path())? {
let entry = entry?;
let fname = entry.file_name().into_string().unwrap();
let path = entry.path();
println!("cargo:rerun-if-changed=./ftl/{}", fname);
buf += &fs::read_to_string(path)?;
}
fs::write(format!("src/i18n/ftl/{}.ftl", lang), buf)?;
}
}
Ok(())
}

rslib/ftl/.gitignore (new file)

@@ -0,0 +1 @@
+repo


@@ -0,0 +1,9 @@
#!/bin/bash
echo "Downloading latest translations..."
if [ ! -d repo ]; then
git clone https://github.com/ankitects/anki-core-i18n repo
fi
(cd repo && git pull)


@@ -0,0 +1,10 @@
#!/bin/bash
#
# expects to be run from the ftl folder
#
test -d repo || exit 1
rsync -av --delete *.ftl repo/core/templates/
(cd repo && git add core; git commit -m update; git push)


@@ -5,7 +5,7 @@ use crate::backend_proto as pb;
use crate::backend_proto::backend_input::Value;
use crate::backend_proto::{Empty, RenderedTemplateReplacement, SyncMediaIn};
use crate::err::{AnkiError, NetworkErrorKind, Result, SyncErrorKind};
-use crate::i18n::{tr_args, I18n, StringsGroup};
+use crate::i18n::{tr_args, FString, I18n};
use crate::latex::{extract_latex, ExtractedLatex};
use crate::media::check::MediaChecker;
use crate::media::sync::MediaSyncProgress;
@@ -397,17 +397,18 @@ impl Backend {
    }

    fn translate_string(&self, input: pb::TranslateStringIn) -> String {
-        let group = match pb::StringsGroup::from_i32(input.group) {
-            Some(group) => group,
-            None => return "".to_string(),
+        let key = match pb::FluentString::from_i32(input.key) {
+            Some(key) => key,
+            None => return "invalid key".to_string(),
        };

        let map = input
            .args
            .iter()
            .map(|(k, v)| (k.as_str(), translate_arg_to_fluent_val(&v)))
            .collect();

-        self.i18n.get(group).trn(&input.key, map)
+        self.i18n.trn(key, map)
    }

    fn format_time_span(&self, input: pb::FormatTimeSpanIn) -> String {
@@ -468,9 +469,7 @@ fn progress_to_proto_bytes(progress: Progress, i18n: &I18n) -> Vec<u8> {
        value: Some(match progress {
            Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, i18n)),
            Progress::MediaCheck(n) => {
-                let s = i18n
-                    .get(StringsGroup::MediaCheck)
-                    .trn("checked", tr_args!["count"=>n]);
+                let s = i18n.trn(FString::MediaCheckChecked, tr_args!["count"=>n]);
                pb::progress::Value::MediaCheck(s)
            }
        }),
@@ -482,15 +481,14 @@ fn progress_to_proto_bytes(progress: Progress, i18n: &I18n) -> Vec<u8> {
}

fn media_sync_progress(p: &MediaSyncProgress, i18n: &I18n) -> pb::MediaSyncProgress {
-    let cat = i18n.get(StringsGroup::Sync);
    pb::MediaSyncProgress {
-        checked: cat.trn("media-checked-count", tr_args!["count"=>p.checked]),
-        added: cat.trn(
-            "media-added-count",
+        checked: i18n.trn(FString::SyncMediaCheckedCount, tr_args!["count"=>p.checked]),
+        added: i18n.trn(
+            FString::SyncMediaAddedCount,
            tr_args!["up"=>p.uploaded_files,"down"=>p.downloaded_files],
        ),
-        removed: cat.trn(
-            "media-removed-count",
+        removed: i18n.trn(
+            FString::SyncMediaRemovedCount,
            tr_args!["up"=>p.uploaded_deletions,"down"=>p.downloaded_deletions],
        ),
    }


@@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::i18n::{I18n, StringsGroup};
+use crate::i18n::{FString, I18n};
pub use failure::{Error, Fail};
use reqwest::StatusCode;
use std::io;
@@ -57,29 +57,23 @@ impl AnkiError {
    pub fn localized_description(&self, i18n: &I18n) -> String {
        match self {
-            AnkiError::SyncError { info, kind } => {
-                let cat = i18n.get(StringsGroup::Sync);
-                match kind {
-                    SyncErrorKind::ServerMessage => info.into(),
-                    SyncErrorKind::Other => info.into(),
-                    SyncErrorKind::Conflict => cat.tr("conflict"),
-                    SyncErrorKind::ServerError => cat.tr("server-error"),
-                    SyncErrorKind::ClientTooOld => cat.tr("client-too-old"),
-                    SyncErrorKind::AuthFailed => cat.tr("wrong-pass"),
-                    SyncErrorKind::ResyncRequired => cat.tr("resync-required"),
-                }
-                .into()
-            }
-            AnkiError::NetworkError { kind, .. } => {
-                let cat = i18n.get(StringsGroup::Network);
-                match kind {
-                    NetworkErrorKind::Offline => cat.tr("offline"),
-                    NetworkErrorKind::Timeout => cat.tr("timeout"),
-                    NetworkErrorKind::ProxyAuth => cat.tr("proxy-auth"),
-                    NetworkErrorKind::Other => cat.tr("other"),
-                }
-                .into()
-            }
+            AnkiError::SyncError { info, kind } => match kind {
+                SyncErrorKind::ServerMessage => info.into(),
+                SyncErrorKind::Other => info.into(),
+                SyncErrorKind::Conflict => i18n.tr(FString::SyncConflict),
+                SyncErrorKind::ServerError => i18n.tr(FString::SyncServerError),
+                SyncErrorKind::ClientTooOld => i18n.tr(FString::SyncClientTooOld),
+                SyncErrorKind::AuthFailed => i18n.tr(FString::SyncWrongPass),
+                SyncErrorKind::ResyncRequired => i18n.tr(FString::SyncResyncRequired),
+            }
+            .into(),
+            AnkiError::NetworkError { kind, .. } => match kind {
+                NetworkErrorKind::Offline => i18n.tr(FString::NetworkOffline),
+                NetworkErrorKind::Timeout => i18n.tr(FString::NetworkTimeout),
+                NetworkErrorKind::ProxyAuth => i18n.tr(FString::NetworkProxyAuth),
+                NetworkErrorKind::Other => i18n.tr(FString::NetworkOther),
+            }
+            .into(),
            _ => "".into(),
        }
    }

rslib/src/i18n/.gitignore (new file)

@@ -0,0 +1 @@
+autogen.rs

rslib/src/i18n/ftl/.gitignore (new file)

@@ -0,0 +1 @@
+*.ftl


@@ -1,9 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use crate::err::Result;
use fluent::{FluentArgs, FluentBundle, FluentResource, FluentValue};
use intl_memoizer::IntlLangMemoizer;
-use log::{error, warn};
+use log::error;
use num_format::Locale;
use std::borrow::Cow;
use std::fs;
@@ -11,9 +12,13 @@ use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use unic_langid::LanguageIdentifier;

-pub use crate::backend_proto::StringsGroup;
+mod autogen;
+use crate::i18n::autogen::FLUENT_KEYS;
pub use fluent::fluent_args as tr_args;
+pub use crate::backend_proto::FluentString as FString;

/// Helper for creating args with &strs
#[macro_export]
macro_rules! tr_strs {
@@ -27,72 +32,140 @@ macro_rules! tr_strs {
        }
    };
}
-use std::collections::HashMap;
pub use tr_strs;

/// The folder containing ftl files for the provided language.
/// If a fully qualified folder exists (eg, en_GB), return that.
/// Otherwise, try the language alone (eg en).
/// If neither folder exists, return None.
-fn lang_folder(lang: LanguageIdentifier, ftl_folder: &Path) -> Option<PathBuf> {
-    if let Some(region) = lang.region() {
-        let path = ftl_folder.join(format!("{}_{}", lang.language(), region));
-        if fs::metadata(&path).is_ok() {
-            return Some(path);
-        }
-    }
-    let path = ftl_folder.join(lang.language());
-    if fs::metadata(&path).is_ok() {
-        Some(path)
-    } else {
-        None
-    }
+fn lang_folder(lang: Option<&LanguageIdentifier>, ftl_folder: &Path) -> Option<PathBuf> {
+    if let Some(lang) = lang {
+        if let Some(region) = lang.region() {
+            let path = ftl_folder.join(format!("{}_{}", lang.language(), region));
+            if fs::metadata(&path).is_ok() {
+                return Some(path);
+            }
+        }
+        let path = ftl_folder.join(lang.language());
+        if fs::metadata(&path).is_ok() {
+            Some(path)
+        } else {
+            None
+        }
+    } else {
+        // fallback folder
+        let path = ftl_folder.join("templates");
+        if fs::metadata(&path).is_ok() {
+            Some(path)
+        } else {
+            None
+        }
+    }
}
-/// Get the fallback/English resource text for the given group.
+/// Get the template/English resource text for the given group.
/// These are embedded in the binary.
-fn ftl_fallback_for_group(group: StringsGroup) -> String {
-    match group {
-        StringsGroup::Other => "",
-        StringsGroup::Test => include_str!("../../tests/support/test.ftl"),
-        StringsGroup::MediaCheck => include_str!("media-check.ftl"),
-        StringsGroup::CardTemplates => include_str!("card-template-rendering.ftl"),
-        StringsGroup::Sync => include_str!("sync.ftl"),
-        StringsGroup::Network => include_str!("network.ftl"),
-        StringsGroup::Statistics => include_str!("statistics.ftl"),
-        StringsGroup::Filtering => include_str!("filtering.ftl"),
-        StringsGroup::Scheduling => include_str!("scheduling.ftl"),
-        StringsGroup::DeckConfig => include_str!("deck-config.ftl"),
-    }
-    .to_string()
+fn ftl_template_text() -> String {
+    include_str!("ftl/template.ftl").to_string()
}

-/// Get the resource text for the given group in the given language folder.
-/// If the file can't be read, returns None.
-fn localized_ftl_for_group(group: StringsGroup, lang_ftl_folder: &Path) -> Option<String> {
-    let path = lang_ftl_folder.join(match group {
-        StringsGroup::Other => "",
-        StringsGroup::Test => "test.ftl",
-        StringsGroup::MediaCheck => "media-check.ftl",
-        StringsGroup::CardTemplates => "card-template-rendering.ftl",
-        StringsGroup::Sync => "sync.ftl",
-        StringsGroup::Network => "network.ftl",
-        StringsGroup::Statistics => "statistics.ftl",
-        StringsGroup::Filtering => "filtering.ftl",
-        StringsGroup::Scheduling => "scheduling.ftl",
-        StringsGroup::DeckConfig => "deck-config.ftl",
-    });
-    fs::read_to_string(&path)
-        .map_err(|e| {
-            warn!("Unable to read translation file: {:?}: {}", path, e);
-        })
-        .ok()
-}
+fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
+    Some(
+        match lang.language() {
+            "en" => {
+                match lang.region() {
+                    Some("GB") | Some("AU") => include_str!("ftl/en-GB.ftl"),
+                    // use fallback language instead
+                    _ => return None,
+                }
+            }
+            "zh" => match lang.region() {
+                Some("TW") | Some("HK") => include_str!("ftl/zh-TW.ftl"),
+                _ => include_str!("ftl/zh-CN.ftl"),
+            },
+            "pt" => {
+                if let Some("PT") = lang.region() {
+                    include_str!("ftl/pt-PT.ftl")
+                } else {
+                    include_str!("ftl/pt-BR.ftl")
+                }
+            }
"ga" => include_str!("ftl/ga-IE.ftl"),
"hy" => include_str!("ftl/hy-AM.ftl"),
"nb" => include_str!("ftl/nb-NO.ftl"),
"sv" => include_str!("ftl/sv-SE.ftl"),
"jbo" => include_str!("ftl/jbo.ftl"),
"kab" => include_str!("ftl/kab.ftl"),
"af" => include_str!("ftl/af.ftl"),
"ar" => include_str!("ftl/ar.ftl"),
"bg" => include_str!("ftl/bg.ftl"),
"ca" => include_str!("ftl/ca.ftl"),
"cs" => include_str!("ftl/cs.ftl"),
"da" => include_str!("ftl/da.ftl"),
"de" => include_str!("ftl/de.ftl"),
"el" => include_str!("ftl/el.ftl"),
"eo" => include_str!("ftl/eo.ftl"),
"es" => include_str!("ftl/es.ftl"),
"et" => include_str!("ftl/et.ftl"),
"eu" => include_str!("ftl/eu.ftl"),
"fa" => include_str!("ftl/fa.ftl"),
"fi" => include_str!("ftl/fi.ftl"),
"fr" => include_str!("ftl/fr.ftl"),
"gl" => include_str!("ftl/gl.ftl"),
"he" => include_str!("ftl/he.ftl"),
"hr" => include_str!("ftl/hr.ftl"),
"hu" => include_str!("ftl/hu.ftl"),
"it" => include_str!("ftl/it.ftl"),
"ja" => include_str!("ftl/ja.ftl"),
"ko" => include_str!("ftl/ko.ftl"),
"la" => include_str!("ftl/la.ftl"),
"mn" => include_str!("ftl/mn.ftl"),
"mr" => include_str!("ftl/mr.ftl"),
"ms" => include_str!("ftl/ms.ftl"),
"nl" => include_str!("ftl/nl.ftl"),
"oc" => include_str!("ftl/oc.ftl"),
"pl" => include_str!("ftl/pl.ftl"),
"ro" => include_str!("ftl/ro.ftl"),
"ru" => include_str!("ftl/ru.ftl"),
"sk" => include_str!("ftl/sk.ftl"),
"sl" => include_str!("ftl/sl.ftl"),
"sr" => include_str!("ftl/sr.ftl"),
"th" => include_str!("ftl/th.ftl"),
"tr" => include_str!("ftl/tr.ftl"),
"uk" => include_str!("ftl/uk.ftl"),
"vi" => include_str!("ftl/vi.ftl"),
_ => return None,
}
.to_string(),
)
}
/// Return the text from any .ftl files in the given folder.
fn ftl_external_text(folder: &Path) -> Result<String> {
let mut buf = String::new();
for entry in fs::read_dir(folder)? {
let entry = entry?;
let fname = entry
.file_name()
.into_string()
.unwrap_or_else(|_| "".into());
if !fname.ends_with(".ftl") {
continue;
}
buf += &fs::read_to_string(entry.path())?
}
Ok(buf)
} }
/// Parse resource text into an AST for inclusion in a bundle.
-/// Returns None if the text contains errors.
+/// Returns None if text contains errors.
+/// extra_text may contain resources loaded from the filesystem
+/// at runtime. If it contains errors, they will not prevent a
+/// bundle from being returned.
fn get_bundle(
    text: String,
+    extra_text: String,
    locales: &[LanguageIdentifier],
) -> Option<FluentBundle<FluentResource>> {
    let res = FluentResource::try_new(text)
@@ -109,9 +182,46 @@ fn get_bundle(
        })
        .ok()?;
if !extra_text.is_empty() {
match FluentResource::try_new(extra_text) {
Ok(res) => bundle.add_resource_overriding(res),
Err((_res, e)) => error!("Unable to parse translations file: {:?}", e),
}
}
// disable isolation characters in test mode
if cfg!(test) {
bundle.set_use_isolating(false);
}
// add numeric formatter
set_bundle_formatter_for_langs(&mut bundle, locales);
    Some(bundle)
}
/// Get a bundle that includes any filesystem overrides.
fn get_bundle_with_extra(
text: String,
lang: Option<&LanguageIdentifier>,
ftl_folder: &Path,
locales: &[LanguageIdentifier],
) -> Option<FluentBundle<FluentResource>> {
let extra_text = if let Some(path) = lang_folder(lang, &ftl_folder) {
match ftl_external_text(&path) {
Ok(text) => text,
Err(e) => {
error!("Error reading external FTL files: {:?}", e);
"".into()
}
}
} else {
"".into()
};
get_bundle(text, extra_text, locales)
}
#[derive(Clone)]
pub struct I18n {
    inner: Arc<Mutex<I18nInner>>,
@@ -119,120 +229,64 @@ pub struct I18n {
impl I18n {
    pub fn new<S: AsRef<str>, P: Into<PathBuf>>(locale_codes: &[S], ftl_folder: P) -> Self {
-        let mut langs = vec![];
-        let mut supported = vec![];
        let ftl_folder = ftl_folder.into();
+        let mut langs = vec![];
+        let mut bundles = Vec::with_capacity(locale_codes.len() + 1);
        for code in locale_codes {
-            if let Ok(lang) = code.as_ref().parse::<LanguageIdentifier>() {
+            let code = code.as_ref();
+            if let Ok(lang) = code.parse::<LanguageIdentifier>() {
                langs.push(lang.clone());
-                if let Some(path) = lang_folder(lang.clone(), &ftl_folder) {
-                    supported.push(path);
-                }
-                // if English was listed, any further preferences are skipped,
-                // as the fallback has 100% coverage, and we need to ensure
-                // it is tried prior to any other langs. But we do keep a file
-                // if one was returned, to allow locale English variants to take
-                // priority over the fallback.
-                if lang.language() == "en" {
-                    break;
-                }
            }
        }
        // add fallback date/time
        langs.push("en_US".parse().unwrap());

-        Self {
-            inner: Arc::new(Mutex::new(I18nInner {
-                langs,
-                available_ftl_folders: supported,
-                cache: Default::default(),
-            })),
-        }
-    }
-
-    pub fn get(&self, group: StringsGroup) -> Arc<I18nCategory> {
-        self.inner.lock().unwrap().get(group)
-    }
-}
-
-struct I18nInner {
-    // all preferred languages of the user, used for determine number format
-    langs: Vec<LanguageIdentifier>,
-    // the available ftl folder subset of the user's preferred languages
-    available_ftl_folders: Vec<PathBuf>,
-    cache: HashMap<StringsGroup, Arc<I18nCategory>>,
-}
-
-impl I18nInner {
-    pub fn get(&mut self, group: StringsGroup) -> Arc<I18nCategory> {
-        let langs = &self.langs;
-        let avail = &self.available_ftl_folders;
-
-        self.cache
-            .entry(group)
-            .or_insert_with(|| Arc::new(I18nCategory::new(langs, avail, group)))
-            .clone()
-    }
-}
-
-pub struct I18nCategory {
-    // bundles in preferred language order, with fallback English as the
-    // last element
-    bundles: Vec<FluentBundle<FluentResource>>,
-}
-
-fn set_bundle_formatter_for_langs<T>(bundle: &mut FluentBundle<T>, langs: &[LanguageIdentifier]) {
-    let num_formatter = NumberFormatter::new(langs);
-    let formatter = move |val: &FluentValue, _intls: &Mutex<IntlLangMemoizer>| -> Option<String> {
-        match val {
-            FluentValue::Number(n) => Some(num_formatter.format(n.value)),
-            _ => None,
-        }
-    };
-    bundle.set_formatter(Some(formatter));
-}
-
-impl I18nCategory {
-    pub fn new(langs: &[LanguageIdentifier], preferred: &[PathBuf], group: StringsGroup) -> Self {
-        let mut bundles = Vec::with_capacity(preferred.len() + 1);
-        for ftl_folder in preferred {
-            if let Some(text) = localized_ftl_for_group(group, ftl_folder) {
-                if let Some(mut bundle) = get_bundle(text, langs) {
-                    if cfg!(test) {
-                        bundle.set_use_isolating(false);
-                    }
-                    set_bundle_formatter_for_langs(&mut bundle, langs);
+        for lang in &langs {
+            // if the language is bundled in the binary
+            if let Some(text) = ftl_localized_text(lang) {
+                if let Some(bundle) = get_bundle_with_extra(text, Some(lang), &ftl_folder, &langs) {
                    bundles.push(bundle);
                } else {
-                    error!("Failed to create bundle for {:?} {:?}", ftl_folder, group);
+                    error!("Failed to create bundle for {:?}", lang.language())
                }
+
+                // if English was listed, any further preferences are skipped,
+                // as the template has 100% coverage, and we need to ensure
+                // it is tried prior to any other langs. But we do keep a file
+                // if one was returned, to allow locale English variants to take
+                // priority over the template.
+                if lang.language() == "en" {
+                    break;
+                }
            }
        }

-        let mut fallback_bundle = get_bundle(ftl_fallback_for_group(group), langs).unwrap();
-        if cfg!(test) {
-            fallback_bundle.set_use_isolating(false);
-        }
-        set_bundle_formatter_for_langs(&mut fallback_bundle, langs);
-        bundles.push(fallback_bundle);
-
-        Self { bundles }
+        // add English templates
+        let template_bundle =
+            get_bundle_with_extra(ftl_template_text(), None, &ftl_folder, &langs).unwrap();
+        bundles.push(template_bundle);
+
+        Self {
+            inner: Arc::new(Mutex::new(I18nInner { bundles })),
+        }
    }
    /// Get translation with zero arguments.
-    pub fn tr(&self, key: &str) -> Cow<str> {
+    pub fn tr(&self, key: FString) -> Cow<str> {
+        let key = FLUENT_KEYS[key as usize];
        self.tr_(key, None)
    }

    /// Get translation with one or more arguments.
-    pub fn trn(&self, key: &str, args: FluentArgs) -> String {
+    pub fn trn(&self, key: FString, args: FluentArgs) -> String {
+        let key = FLUENT_KEYS[key as usize];
        self.tr_(key, Some(args)).into()
    }

    fn tr_<'a>(&'a self, key: &str, args: Option<FluentArgs>) -> Cow<'a, str> {
-        for bundle in &self.bundles {
+        for bundle in &self.inner.lock().unwrap().bundles {
            let msg = match bundle.get_message(key) {
                Some(msg) => msg,
                // not translated in this bundle
@@ -254,10 +308,29 @@ impl I18nCategory {
                return out.to_string().into();
            }
        }

-        format!("Missing translation key: {}", key).into()
+        // return the key name if it was missing
+        key.to_string().into()
    }
}
struct I18nInner {
// bundles in preferred language order, with template English as the
// last element
bundles: Vec<FluentBundle<FluentResource>>,
}
fn set_bundle_formatter_for_langs<T>(bundle: &mut FluentBundle<T>, langs: &[LanguageIdentifier]) {
let num_formatter = NumberFormatter::new(langs);
let formatter = move |val: &FluentValue, _intls: &Mutex<IntlLangMemoizer>| -> Option<String> {
match val {
FluentValue::Number(n) => Some(num_formatter.format(n.value)),
_ => None,
}
};
bundle.set_formatter(Some(formatter));
}
fn first_available_num_format_locale(langs: &[LanguageIdentifier]) -> Option<Locale> {
    for lang in langs {
        if let Some(locale) = num_format_locale(lang) {
@@ -315,8 +388,8 @@ impl NumberFormatter {

#[cfg(test)]
mod test {
+    use crate::i18n::NumberFormatter;
    use crate::i18n::{tr_args, I18n};
-    use crate::i18n::{NumberFormatter, StringsGroup};
    use std::path::PathBuf;
    use unic_langid::langid;
@@ -331,56 +404,48 @@ mod test {
    #[test]
    fn i18n() {
-        // English fallback
-        let i18n = I18n::new(&["zz"], "../../tests/support");
-        let cat = i18n.get(StringsGroup::Test);
-        assert_eq!(cat.tr("valid-key"), "a valid key");
-        assert_eq!(
-            cat.tr("invalid-key"),
-            "Missing translation key: invalid-key"
-        );
-        assert_eq!(
-            cat.trn("two-args-key", tr_args!["one"=>1.1, "two"=>"2"]),
-            "two args: 1.10 and 2"
-        );
-        // commented out to avoid scary warning during unit tests
-        // assert_eq!(
-        //     cat.trn("two-args-key", tr_args!["one"=>"testing error reporting"]),
-        //     "two args: testing error reporting and {$two}"
-        // );
-        assert_eq!(cat.trn("plural", tr_args!["hats"=>1.0]), "You have 1 hat.");
-        assert_eq!(
-            cat.trn("plural", tr_args!["hats"=>1.1]),
-            "You have 1.10 hats."
-        );
-        assert_eq!(cat.trn("plural", tr_args!["hats"=>3]), "You have 3 hats.");
-        // Another language
-        let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        d.push("tests/support");
-        let i18n = I18n::new(&["ja_JP"], &d);
-        let cat = i18n.get(StringsGroup::Test);
-        assert_eq!(cat.tr("valid-key"), "キー");
-        assert_eq!(cat.tr("only-in-english"), "not translated");
-        assert_eq!(
-            cat.tr("invalid-key"),
-            "Missing translation key: invalid-key"
-        );
-        assert_eq!(
-            cat.trn("two-args-key", tr_args!["one"=>1, "two"=>"2"]),
-            "1と2"
-        );
-        // Decimal separator
-        let i18n = I18n::new(&["pl-PL"], &d);
-        let cat = i18n.get(StringsGroup::Test);
-        // falls back on English, but with Polish separators
-        assert_eq!(
-            cat.trn("two-args-key", tr_args!["one"=>1, "two"=>2.07]),
-            "two args: 1 and 2,07"
-        );
+        let mut ftl_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+        ftl_dir.push("tests/support/ftl");
+
+        // English template
+        let i18n = I18n::new(&["zz"], &ftl_dir);
+        assert_eq!(i18n.tr_("valid-key", None), "a valid key");
+        assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
+        assert_eq!(
+            i18n.tr_("two-args-key", Some(tr_args!["one"=>1.1, "two"=>"2"])),
+            "two args: 1.10 and 2"
+        );
+        assert_eq!(
+            i18n.tr_("plural", Some(tr_args!["hats"=>1.0])),
+            "You have 1 hat."
+        );
+        assert_eq!(
+            i18n.tr_("plural", Some(tr_args!["hats"=>1.1])),
+            "You have 1.10 hats."
+        );
+        assert_eq!(
+            i18n.tr_("plural", Some(tr_args!["hats"=>3])),
+            "You have 3 hats."
+        );
+        // Another language
+        let i18n = I18n::new(&["ja_JP"], &ftl_dir);
+        assert_eq!(i18n.tr_("valid-key", None), "キー");
+        assert_eq!(i18n.tr_("only-in-english", None), "not translated");
+        assert_eq!(i18n.tr_("invalid-key", None), "invalid-key");
+        assert_eq!(
+            i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>"2"])),
+            "1と2"
+        );
+        // Decimal separator
+        let i18n = I18n::new(&["pl-PL"], &ftl_dir);
+        // falls back on English, but with Polish separators
+        assert_eq!(
+            i18n.tr_("two-args-key", Some(tr_args!["one"=>1, "two"=>2.07])),
+            "two args: 1 and 2,07"
+        );
    }


@@ -3,7 +3,7 @@
use crate::cloze::expand_clozes_to_reveal_latex;
use crate::err::{AnkiError, Result};
-use crate::i18n::{tr_args, tr_strs, I18n, StringsGroup};
+use crate::i18n::{tr_args, tr_strs, FString, I18n};
use crate::latex::extract_latex;
use crate::media::col::{
    for_every_note, get_note_types, mark_collection_modified, open_or_create_collection_db,
@@ -89,35 +89,53 @@ where
    pub fn summarize_output(&self, output: &mut MediaCheckOutput) -> String {
        let mut buf = String::new();
-        let cat = self.i18n.get(StringsGroup::MediaCheck);
+        let i = &self.i18n;

        // top summary area
-        buf += &cat.trn("missing-count", tr_args!["count"=>output.missing.len()]);
+        buf += &i.trn(
+            FString::MediaCheckMissingCount,
+            tr_args!["count"=>output.missing.len()],
+        );
        buf.push('\n');
-        buf += &cat.trn("unused-count", tr_args!["count"=>output.unused.len()]);
+        buf += &i.trn(
+            FString::MediaCheckUnusedCount,
+            tr_args!["count"=>output.unused.len()],
+        );
        buf.push('\n');

        if !output.renamed.is_empty() {
-            buf += &cat.trn("renamed-count", tr_args!["count"=>output.renamed.len()]);
+            buf += &i.trn(
+                FString::MediaCheckRenamedCount,
+                tr_args!["count"=>output.renamed.len()],
+            );
            buf.push('\n');
        }
        if !output.oversize.is_empty() {
-            buf += &cat.trn("oversize-count", tr_args!["count"=>output.oversize.len()]);
+            buf += &i.trn(
+                FString::MediaCheckOversizeCount,
+                tr_args!["count"=>output.oversize.len()],
+            );
            buf.push('\n');
        }
        if !output.dirs.is_empty() {
-            buf += &cat.trn("subfolder-count", tr_args!["count"=>output.dirs.len()]);
+            buf += &i.trn(
+                FString::MediaCheckSubfolderCount,
+                tr_args!["count"=>output.dirs.len()],
+            );
            buf.push('\n');
        }

        buf.push('\n');

        if !output.renamed.is_empty() {
-            buf += &cat.tr("renamed-header");
+            buf += &i.tr(FString::MediaCheckRenamedHeader);
            buf.push('\n');
            for (old, new) in &output.renamed {
-                buf += &cat.trn("renamed-file", tr_strs!["old"=>old,"new"=>new]);
+                buf += &i.trn(
+                    FString::MediaCheckRenamedFile,
+                    tr_strs!["old"=>old,"new"=>new],
+                );
                buf.push('\n');
            }
            buf.push('\n')
@@ -125,10 +143,10 @@ where
        if !output.oversize.is_empty() {
            output.oversize.sort();
-            buf += &cat.tr("oversize-header");
+            buf += &i.tr(FString::MediaCheckOversizeHeader);
            buf.push('\n');
            for fname in &output.oversize {
-                buf += &cat.trn("oversize-file", tr_strs!["filename"=>fname]);
+                buf += &i.trn(FString::MediaCheckOversizeFile, tr_strs!["filename"=>fname]);
                buf.push('\n');
            }
            buf.push('\n')
@@ -136,10 +154,13 @@ where
        if !output.dirs.is_empty() {
            output.dirs.sort();
-            buf += &cat.tr("subfolder-header");
+            buf += &i.tr(FString::MediaCheckSubfolderHeader);
            buf.push('\n');
            for fname in &output.dirs {
-                buf += &cat.trn("subfolder-file", tr_strs!["filename"=>fname]);
+                buf += &i.trn(
+                    FString::MediaCheckSubfolderFile,
+                    tr_strs!["filename"=>fname],
+                );
                buf.push('\n');
            }
            buf.push('\n')
@@ -147,10 +168,10 @@ where
        if !output.missing.is_empty() {
            output.missing.sort();
-            buf += &cat.tr("missing-header");
+            buf += &i.tr(FString::MediaCheckMissingHeader);
            buf.push('\n');
            for fname in &output.missing {
-                buf += &cat.trn("missing-file", tr_strs!["filename"=>fname]);
+                buf += &i.trn(FString::MediaCheckMissingFile, tr_strs!["filename"=>fname]);
                buf.push('\n');
            }
            buf.push('\n')
@@ -158,10 +179,10 @@ where
        if !output.unused.is_empty() {
            output.unused.sort();
-            buf += &cat.tr("unused-header");
+            buf += &i.tr(FString::MediaCheckUnusedHeader);
            buf.push('\n');
            for fname in &output.unused {
-                buf += &cat.trn("unused-file", tr_strs!["filename"=>fname]);
+                buf += &i.trn(FString::MediaCheckUnusedFile, tr_strs!["filename"=>fname]);
                buf.push('\n');
            }
        }


@@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use crate::i18n::{tr_args, I18n, StringsGroup};
+use crate::i18n::{tr_args, FString, I18n};

/// Short string like '4d' to place above answer buttons.
pub fn answer_button_time(seconds: f32, i18n: &I18n) -> String {
@@ -11,10 +11,16 @@ pub fn answer_button_time(seconds: f32, i18n: &I18n) -> String {
        // we don't show fractional values except for months/years
        _ => span.as_unit().round(),
    };
-    let unit = span.unit().as_str();
    let args = tr_args!["amount" => amount];
-    i18n.get(StringsGroup::Scheduling)
-        .trn(&format!("answer-button-time-{}", unit), args)
+    let key = match span.unit() {
+        TimespanUnit::Seconds => FString::SchedulingAnswerButtonTimeSeconds,
+        TimespanUnit::Minutes => FString::SchedulingAnswerButtonTimeMinutes,
+        TimespanUnit::Hours => FString::SchedulingAnswerButtonTimeHours,
+        TimespanUnit::Days => FString::SchedulingAnswerButtonTimeDays,
+        TimespanUnit::Months => FString::SchedulingAnswerButtonTimeMonths,
+        TimespanUnit::Years => FString::SchedulingAnswerButtonTimeYears,
+    };
+    i18n.trn(key, args)
}

/// Describe the given seconds using the largest appropriate unit
@@ -22,10 +28,16 @@ pub fn answer_button_time(seconds: f32, i18n: &I18n) -> String {
pub fn time_span(seconds: f32, i18n: &I18n) -> String {
    let span = Timespan::from_secs(seconds).natural_span();
    let amount = span.as_unit();
-    let unit = span.unit().as_str();
    let args = tr_args!["amount" => amount];
-    i18n.get(StringsGroup::Scheduling)
-        .trn(&format!("time-span-{}", unit), args)
+    let key = match span.unit() {
+        TimespanUnit::Seconds => FString::SchedulingTimeSpanSeconds,
+        TimespanUnit::Minutes => FString::SchedulingTimeSpanMinutes,
+        TimespanUnit::Hours => FString::SchedulingTimeSpanHours,
+        TimespanUnit::Days => FString::SchedulingTimeSpanDays,
+        TimespanUnit::Months => FString::SchedulingTimeSpanMonths,
+        TimespanUnit::Years => FString::SchedulingTimeSpanYears,
+    };
+    i18n.trn(key, args)
}

// fixme: this doesn't belong here
@@ -40,8 +52,7 @@ pub fn studied_today(cards: usize, secs: f32, i18n: &I18n) -> String {
    };
    let args = tr_args!["amount" => amount, "unit" => unit,
                        "cards" => cards, "secs-per-card" => secs_per];
-    i18n.get(StringsGroup::Statistics)
-        .trn("studied-today", args)
+    i18n.trn(FString::StatisticsStudiedToday, args)
}

// fixme: this doesn't belong here
@@ -58,10 +69,8 @@ pub fn learning_congrats(remaining: usize, next_due: f32, i18n: &I18n) -> String
    let remaining_args = tr_args!["remaining" => remaining];
    format!(
        "{} {}",
-        i18n.get(StringsGroup::Scheduling)
-            .trn("next-learn-due", next_args),
-        i18n.get(StringsGroup::Scheduling)
-            .trn("learn-remaining", remaining_args)
+        i18n.trn(FString::SchedulingNextLearnDue, next_args),
+        i18n.trn(FString::SchedulingLearnRemaining, remaining_args)
    )
}


@@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use crate::err::{AnkiError, Result, TemplateError};
-use crate::i18n::{tr_strs, I18n, I18nCategory, StringsGroup};
+use crate::i18n::{tr_strs, FString, I18n};
use crate::template_filters::apply_filters;
use lazy_static::lazy_static;
use nom;
@@ -196,14 +196,13 @@ fn parse_inner<'a, I: Iterator<Item = TemplateResult<Token<'a>>>>(
}

fn template_error_to_anki_error(err: TemplateError, q_side: bool, i18n: &I18n) -> AnkiError {
-    let cat = i18n.get(StringsGroup::CardTemplates);
-    let header = cat.tr(if q_side {
-        "front-side-problem"
+    let header = i18n.tr(if q_side {
+        FString::CardTemplateRenderingFrontSideProblem
    } else {
-        "back-side-problem"
+        FString::CardTemplateRenderingBackSideProblem
    });
-    let details = localized_template_error(&cat, err);
-    let more_info = cat.tr("more-info");
+    let details = localized_template_error(i18n, err);
+    let more_info = i18n.tr(FString::CardTemplateRenderingMoreInfo);
    let info = format!(
        "{}<br>{}<br><a href='{}'>{}</a>",
        header, details, TEMPLATE_ERROR_LINK, more_info
@@ -212,13 +211,14 @@ fn template_error_to_anki_error(err: TemplateError, q_side: bool, i18n: &I18n) -
    AnkiError::TemplateError { info }
}

-fn localized_template_error(cat: &I18nCategory, err: TemplateError) -> String {
+fn localized_template_error(i18n: &I18n, err: TemplateError) -> String {
    match err {
-        TemplateError::NoClosingBrackets(tag) => {
-            cat.trn("no-closing-brackets", tr_strs!("tag"=>tag, "missing"=>"}}"))
-        }
-        TemplateError::ConditionalNotClosed(tag) => cat.trn(
-            "conditional-not-closed",
+        TemplateError::NoClosingBrackets(tag) => i18n.trn(
+            FString::CardTemplateRenderingNoClosingBrackets,
+            tr_strs!("tag"=>tag, "missing"=>"}}"),
+        ),
+        TemplateError::ConditionalNotClosed(tag) => i18n.trn(
+            FString::CardTemplateRenderingConditionalNotClosed,
            tr_strs!("missing"=>format!("{{{{/{}}}}}", tag)),
        ),
        TemplateError::ConditionalNotOpen {
@@ -226,15 +226,15 @@ fn localized_template_error(i18n: &I18n, err: TemplateError) -> String {
            currently_open,
        } => {
            if let Some(open) = currently_open {
-                cat.trn(
-                    "wrong-conditional-closed",
+                i18n.trn(
+                    FString::CardTemplateRenderingWrongConditionalClosed,
                    tr_strs!(
                        "found"=>format!("{{{{/{}}}}}", closed),
                        "expected"=>format!("{{{{/{}}}}}", open)),
                )
            } else {
-                cat.trn(
-                    "conditional-not-open",
+                i18n.trn(
+                    FString::CardTemplateRenderingConditionalNotOpen,
                    tr_strs!(
                        "found"=>format!("{{{{/{}}}}}", closed),
                        "missing1"=>format!("{{{{#{}}}}}", closed),
@@ -243,8 +243,8 @@ fn localized_template_error(i18n: &I18n, err: TemplateError) -> String {
                )
            }
        }
-        TemplateError::FieldNotFound { field, filters } => cat.trn(
-            "no-such-field",
+        TemplateError::FieldNotFound { field, filters } => i18n.trn(
+            FString::CardTemplateRenderingNoSuchField,
            tr_strs!(
                "found"=>format!("{{{{{}{}}}}}", filters, field),
                "field"=>field),
@@ -508,12 +508,11 @@ pub fn render_card(
    // check if the front side was empty
    if !qtmpl.renders_with_fields(context.nonempty_fields) {
-        let cat = i18n.get(StringsGroup::CardTemplates);
        let info = format!(
            "{}<br><a href='{}'>{}</a>",
-            cat.tr("empty-front"),
+            i18n.tr(FString::CardTemplateRenderingEmptyFront),
            TEMPLATE_BLANK_LINK,
-            cat.tr("more-info")
+            i18n.tr(FString::CardTemplateRenderingMoreInfo)
        );
        return Err(AnkiError::TemplateError { info });
    };