Mirror of https://github.com/ankitects/anki.git
Add an improved tool for copying/moving FTL strings
parent 04b74a56b7
commit aced75f743

9 changed files with 230 additions and 125 deletions
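The new `ftl string` subcommand replaces the standalone duplicate-string.py script that is deleted below. A rough usage sketch, assuming the repository root as the working directory; the key and file names are the illustrative ones from the old script's comments, not values taken from this commit:

# old workflow: explicit .ftl paths plus unprefixed keys
$ python duplicate-string.py \
    /source/templates/media-check.ftl window-title \
    /dest/templates/something.ftl key-name

# new workflow: per-repo language folders plus prefixed keys; the target .ftl
# file is inferred from the key prefix, and all translations are carried across
$ ftl/ftl string copy ftl/core-repo/core ftl/core-repo/core \
    media-check-window-title something-key-name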
Cargo.lock (generated) | 1

@@ -1290,6 +1290,7 @@ dependencies = [
  "camino",
  "clap",
  "fluent-syntax",
+ "itertools 0.11.0",
  "lazy_static",
  "regex",
  "serde_json",
ftl/.gitignore (vendored) | 1

@@ -1,2 +1,3 @@
 usage/*
 !usage/no-deprecate.json
+mobile-repo
@@ -15,6 +15,7 @@ anyhow.workspace = true
 camino.workspace = true
 clap.workspace = true
 fluent-syntax.workspace = true
+itertools.workspace = true
 lazy_static.workspace = true
 regex.workspace = true
 serde_json.workspace = true
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-# Copyright: Ankitects Pty Ltd and contributors
-# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-# pylint: disable=unbalanced-tuple-unpacking
-
-import copy
-import os
-import re
-import sys
-
-from fluent.syntax import parse, serialize
-from fluent.syntax.ast import Junk, Message
-
-# clone an existing ftl string as a new key
-# eg:
-# $ python duplicate-string.py \
-#   /source/templates/media-check.ftl window-title \
-#   /dest/templates/something.ftl key-name
-#
-# after running, you'll need to copy the output template file into Anki's source
-
-(src_filename, old_key, dst_filename, new_key) = sys.argv[1:]
-
-# add file as prefix to key
-src_prefix = os.path.splitext(os.path.basename(src_filename))[0]
-dst_prefix = os.path.splitext(os.path.basename(dst_filename))[0]
-old_key = f"{src_prefix}-{old_key}"
-new_key = f"{dst_prefix}-{new_key}"
-
-
-def get_entry(fname, key):
-    if not os.path.exists(fname):
-        return
-
-    with open(fname, encoding="utf8") as file:
-        orig = file.read()
-    obj = parse(orig)
-    for ent in obj.body:
-        if isinstance(ent, Junk):
-            raise Exception(f"file had junk! {fname} {ent}")
-        elif isinstance(ent, Message):
-            if ent.id.name == old_key:
-                return copy.deepcopy(ent)
-
-
-def write_entry(fname, key, entry):
-    assert entry
-    entry.id.name = key
-
-    if not os.path.exists(fname):
-        orig = ""
-    else:
-        with open(fname, encoding="utf8") as file:
-            orig = file.read()
-    obj = parse(orig)
-    for ent in obj.body:
-        if isinstance(ent, Junk):
-            raise Exception(f"file had junk! {fname} {ent}")
-
-    obj.body.append(entry)
-    modified = serialize(obj, with_junk=True)
-    # escape leading dots
-    modified = re.sub(r"(?ms)^( +)\.", '\\1{"."}', modified)
-
-    # ensure the resulting serialized file is valid by parsing again
-    obj = parse(modified)
-    for ent in obj.body:
-        if isinstance(ent, Junk):
-            raise Exception(f"introduced junk! {fname} {ent}")
-
-    # it's ok, write it out
-    with open(fname, "w", encoding="utf8") as file:
-        file.write(modified)
-
-
-# get all existing entries into lang -> entry
-entries = {}
-
-i18ndir = os.path.join(os.path.dirname(src_filename), "..")
-langs = os.listdir(i18ndir)
-
-for lang in langs:
-    if lang == "templates":
-        # template
-        ftl_path = src_filename
-    else:
-        # translation
-        ftl_path = src_filename.replace("templates", lang)
-    ftl_dir = os.path.dirname(ftl_path)
-
-    if not os.path.exists(ftl_dir):
-        continue
-
-    entry = get_entry(ftl_path, old_key)
-    if entry:
-        entries[lang] = entry
-    else:
-        assert lang != "templates"

-# write them into target files
-
-i18ndir = os.path.join(os.path.dirname(dst_filename), "..")
-langs = os.listdir(i18ndir)
-
-for lang in langs:
-    if lang == "templates":
-        # template
-        ftl_path = dst_filename
-    else:
-        # translation
-        ftl_path = dst_filename.replace("templates", lang)
-    ftl_dir = os.path.dirname(ftl_path)
-
-    if not os.path.exists(ftl_dir):
-        continue
-
-    if lang in entries:
-        entry = entries[lang]
-        write_entry(ftl_path, new_key, entry)
-
-print("done")
ftl/ftl (executable file) | 4

@@ -0,0 +1,4 @@
+#!/bin/bash
+
+cd $(dirname $0)/..
+cargo run -p ftl -- $*
ftl/move-from-ankimobile (executable file) | 6

@@ -0,0 +1,6 @@
+#!/bin/bash
+#
+# Move a translation that previously only existed in AnkiMobile to the core translations.
+#
+
+./ftl string move ftl/mobile-repo/mobile ftl/core-repo/core $*
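A possible invocation, assuming the AnkiMobile strings have been checked out into ftl/mobile-repo and the script is run from inside the ftl/ directory; the key name is hypothetical:

$ cd ftl
$ ./move-from-ankimobile some-mobile-only-key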
@@ -16,7 +16,6 @@ use fluent_syntax::ast::Resource;
 use fluent_syntax::parser;
 use lazy_static::lazy_static;
 use regex::Regex;
-use serde_json;
 use walkdir::DirEntry;
 use walkdir::WalkDir;
 
@@ -1,8 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-pub mod garbage_collection;
-pub mod serialize;
 
+mod garbage_collection;
+mod serialize;
+mod string;
 mod sync;
 
 use anyhow::Result;
@@ -14,6 +15,8 @@ use garbage_collection::write_ftl_json;
 use garbage_collection::DeprecateEntriesArgs;
 use garbage_collection::GarbageCollectArgs;
 use garbage_collection::WriteJsonArgs;
+use string::string_operation;
+use string::StringArgs;
 
 #[derive(Parser)]
 struct Cli {
@@ -38,6 +41,10 @@ enum Command {
     /// and adding a deprecation warning. An entry is considered unused if
     /// cannot be found in a source or JSON file.
     Deprecate(DeprecateEntriesArgs),
+    /// Copy or move a key from one ftl file to another, including all its
+    /// translations. Source and destination should be e.g.
+    /// ftl/core-repo/core.
+    String(StringArgs),
 }
 
 fn main() -> Result<()> {
@@ -46,5 +53,6 @@ fn main() -> Result<()> {
         Command::WriteJson(args) => write_ftl_json(args),
         Command::GarbageCollect(args) => garbage_collect_ftl_entries(args),
         Command::Deprecate(args) => deprecate_ftl_entries(args),
+        Command::String(args) => string_operation(args),
     }
 }
ftl/src/string.rs (normal file) | 207

@@ -0,0 +1,207 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+
+use anki_io::read_to_string;
+use anki_io::write_file;
+use anki_io::write_file_if_changed;
+use anki_io::ToUtf8PathBuf;
+use anyhow::Context;
+use anyhow::Result;
+use camino::Utf8Component;
+use camino::Utf8Path;
+use camino::Utf8PathBuf;
+use clap::Args;
+use clap::ValueEnum;
+use fluent_syntax::ast::Entry;
+use fluent_syntax::parser;
+
+use crate::serialize;
+
+#[derive(Clone, ValueEnum, PartialEq, Eq, Debug)]
+pub enum StringOperation {
+    Copy,
+    Move,
+}
+
+#[derive(Args)]
+pub struct StringArgs {
+    operation: StringOperation,
+    /// The folder which contains the different languages as subfolders, e.g.
+    /// ftl/core-repo/core
+    src_lang_folder: Utf8PathBuf,
+    dst_lang_folder: Utf8PathBuf,
+    /// E.g. 'actions-run'. File will be inferred from the prefix.
+    src_key: String,
+    /// If not specified, the key & file will be the same as the source key.
+    dst_key: Option<String>,
+}
+
+pub fn string_operation(args: StringArgs) -> Result<()> {
+    let old_key = &args.src_key;
+    let new_key = args.dst_key.as_ref().unwrap_or(old_key);
+    let src_ftl_file = ftl_file_from_key(old_key);
+    let dst_ftl_file = ftl_file_from_key(new_key);
+    let mut entries: HashMap<&str, Entry<String>> = HashMap::new();
+
+    // Fetch source strings
+    let src_langs = all_langs(&args.src_lang_folder)?;
+    for lang in &src_langs {
+        let ftl_path = lang.join(&src_ftl_file);
+        if !ftl_path.exists() {
+            continue;
+        }
+
+        let entry = get_entry(&ftl_path, old_key);
+        if let Some(entry) = entry {
+            entries.insert(lang.file_name().unwrap(), entry);
+        } else {
+            // the key might be missing from some languages, but it should not be missing
+            // from the template
+            assert_ne!(lang, "templates");
+        }
+    }
+
+    // Apply to destination
+    let dst_langs = all_langs(&args.dst_lang_folder)?;
+    for lang in &dst_langs {
+        let ftl_path = lang.join(&dst_ftl_file);
+        if !ftl_path.exists() {
+            continue;
+        }
+
+        if let Some(entry) = entries.get(lang.file_name().unwrap()) {
+            println!("Updating {ftl_path}");
+            write_entry(&ftl_path, new_key, entry.clone())?;
+        }
+    }
+
+    if let Some(template_dir) = additional_template_folder(&args.dst_lang_folder) {
+        // Our templates are also stored in the source tree, and need to be updated too.
+        let ftl_path = template_dir.join(&dst_ftl_file);
+        println!("Updating {ftl_path}");
+        write_entry(
+            &ftl_path,
+            new_key,
+            entries.get("templates").unwrap().clone(),
+        )?;
+    }
+
+    if args.operation == StringOperation::Move {
+        // Delete the old key
+        for lang in &src_langs {
+            let ftl_path = lang.join(&src_ftl_file);
+            if !ftl_path.exists() {
+                continue;
+            }
+
+            if delete_entry(&ftl_path, old_key)? {
+                println!("Deleted entry from {ftl_path}");
+            }
+        }
+        if let Some(template_dir) = additional_template_folder(&args.src_lang_folder) {
+            let ftl_path = template_dir.join(&src_ftl_file);
+            if delete_entry(&ftl_path, old_key)? {
+                println!("Deleted entry from {ftl_path}");
+            }
+        }
+    }
+
+    Ok(())
+}
+
+fn additional_template_folder(dst_folder: &Utf8Path) -> Option<Utf8PathBuf> {
+    // ftl/core-repo/core -> ftl/core
+    // ftl/qt-repo/qt -> ftl/qt
+    let adjusted_path = Utf8PathBuf::from_iter(
+        [Utf8Component::Normal("ftl")]
+            .into_iter()
+            .chain(dst_folder.components().skip(2)),
+    );
+    if adjusted_path.exists() {
+        Some(adjusted_path)
+    } else {
+        None
+    }
+}
+
+fn all_langs(lang_folder: &Utf8Path) -> Result<Vec<Utf8PathBuf>> {
+    std::fs::read_dir(lang_folder)
+        .with_context(|| format!("reading {:?}", lang_folder))?
+        .filter_map(Result::ok)
+        .map(|e| Ok(e.path().utf8()?))
+        .collect()
+}
+
+fn ftl_file_from_key(old_key: &str) -> String {
+    format!("{}.ftl", old_key.split('-').next().unwrap())
+}
+
+fn get_entry(fname: &Utf8Path, key: &str) -> Option<Entry<String>> {
+    let content = fs::read_to_string(fname).unwrap();
+    let resource = parser::parse(content).unwrap();
+    for entry in resource.body {
+        if let Entry::Message(message) = entry {
+            if message.id.name == key {
+                return Some(Entry::Message(message));
+            }
+        }
+    }
+
+    None
+}
+
+fn write_entry(path: &Utf8Path, key: &str, mut entry: Entry<String>) -> Result<()> {
+    if let Entry::Message(message) = &mut entry {
+        message.id.name = key.to_string();
+    }
+
+    let content = if Path::new(path).exists() {
+        fs::read_to_string(path).unwrap()
+    } else {
+        String::new()
+    };
+    let mut resource = parser::parse(content).unwrap();
+    resource.body.push(entry);
+
+    let mut modified = serialize::serialize(&resource);
+    // escape leading dots
+    modified = modified.replace(" +.", " +{\".\"}");
+
+    // ensure the resulting serialized file is valid by parsing again
+    let _ = parser::parse(modified.clone()).unwrap();
+
+    // it's ok, write it out
+    Ok(write_file(path, modified)?)
+}
+
+fn delete_entry(path: &Utf8Path, key: &str) -> Result<bool> {
+    let content = read_to_string(path)?;
+    let mut resource = parser::parse(content).unwrap();
+    let mut did_change = false;
+    resource.body.retain(|entry| {
+        !if let Entry::Message(message) = entry {
+            if message.id.name == key {
+                did_change = true;
+                true
+            } else {
+                false
+            }
+        } else {
+            false
+        }
+    });
+
+    let mut modified = serialize::serialize(&resource);
+    // escape leading dots
+    modified = modified.replace(" +.", " +{\".\"}");
+
+    // ensure the resulting serialized file is valid by parsing again
+    let _ = parser::parse(modified.clone()).unwrap();
+
+    // it's ok, write it out
+    write_file_if_changed(path, modified)?;
+    Ok(did_change)
+}
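To tie the pieces together: ftl_file_from_key() derives the target file from the key's prefix, so renaming a key while copying can also relocate it to a different .ftl file, and additional_template_folder() maps e.g. ftl/core-repo/core to ftl/core so the templates kept in the source tree are updated as well. A hedged example; the destination key name is hypothetical:

# copy core's actions-run into qt's preferences.ftl under a new name,
# updating every available translation plus the in-tree ftl/qt template
$ ftl/ftl string copy ftl/core-repo/core ftl/qt-repo/qt \
    actions-run preferences-run-action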