Add support for using n2 instead of ninja
Provides better visibility into what the build is currently doing; motivated by slow node.js downloads making the build appear stuck. You can test this out by running ./tools/install-n2 and then building normally. Please report any problems, and run 'cargo uninstall n2' to get back to the old behaviour. It works on Windows, but prints a new line each second instead of redrawing the same area.

A couple of changes were required for compatibility:

- n2 doesn't resolve $variable names inside other variables, so the resolution needs to be done by our build generator (see the sketch below).
- Our inputs and outputs in build.ninja need to be listed in a deterministic order to avoid unwanted rebuilds. I've made a few other tweaks, so the build file should now be fully deterministic.
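As a rough illustration of the first point, here is a minimal sketch of the expansion the generator now performs itself. The function name and signature are made up for the example, but the substitution mirrors the loop added near the end of render() in the diff below:

// Sketch only: expand_variables is a hypothetical helper, not part of the
// codebase. n2 does not expand $name references nested inside other
// variables, so the generator substitutes every variable (and $builddir)
// into the generated text before build.ninja is written.
fn expand_variables(mut buf: String, buildroot: &str, variables: &[(String, String)]) -> String {
    buf = buf.replace("$builddir", buildroot);
    for (key, value) in variables {
        buf = buf.replace(
            // each "$key" occurrence is replaced with its value...
            &format!("${key}"),
            // ...after the value itself has had $builddir expanded
            &value.replace("$builddir", buildroot),
        );
    }
    buf
}

With top-level variables baked in like this, the nested references that n2 would otherwise leave unexpanded are already resolved by the time build.ninja is written.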
parent 09c57369ad
commit 9701055eb5

14 changed files with 82 additions and 29 deletions

.gitignore (vendored): 2 changes

@@ -10,3 +10,5 @@ target
 /windows.bazelrc
 /out
 node_modules
+.n2_db
+.ninja_log

Cargo.lock (generated): 7 changes

@@ -160,6 +160,8 @@ dependencies = [
 name = "anki_i18n"
 version = "0.0.0"
 dependencies = [
+ "anki_io",
+ "anyhow",
  "fluent",
  "fluent-bundle",
  "fluent-syntax",

@@ -824,6 +826,8 @@ dependencies = [
 name = "configure"
 version = "0.0.0"
 dependencies = [
+ "anyhow",
+ "itertools",
  "ninja_gen",
  "workspace-hack",
 ]

@@ -2405,6 +2409,8 @@ checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
 name = "ninja_gen"
 version = "0.0.0"
 dependencies = [
+ "anki_io",
+ "anyhow",
  "camino",
  "globset",
  "itertools",

@@ -3397,6 +3403,7 @@ dependencies = [
  "itertools",
+ "junction",
  "termcolor",
  "which",
  "workspace-hack",
 ]

@@ -9,5 +9,7 @@ license.workspace = true
 rust-version.workspace = true

 [dependencies]
+anyhow = "1.0.71"
+itertools = "0.10.5"
 ninja_gen = { "path" = "../ninja_gen" }
 workspace-hack = { version = "0.1", path = "../../tools/workspace-hack" }

@@ -56,7 +56,7 @@ fn main() -> Result<()> {

     build.trailing_text = "default pylib qt\n".into();

-    build.write_build_file();
+    build.write_build_file()?;

     Ok(())
 }

@@ -141,7 +141,10 @@ fn build_and_check_tslib(build: &mut Build) -> Result<()> {
             protos: inputs![glob!["proto/**/*.proto"]],
             include_dirs: &["proto"],
             out_dir: "out/ts/lib",
-            out_path_transform: |path| path.replace("proto/", "ts/lib/"),
+            out_path_transform: |path| {
+                path.replace("proto/", "ts/lib/")
+                    .replace("proto\\", "ts/lib\\")
+            },
             py_transform_script: "pylib/tools/markpure.py",
         },
     )?;

@@ -9,6 +9,8 @@ license.workspace = true
 rust-version.workspace = true

 [dependencies]
+anki_io = { version = "0.0.0", path = "../../rslib/io" }
+anyhow = "1.0.71"
 camino = "1.1.4"
 globset = "0.4.10"
 itertools = "0.10.5"

@@ -6,6 +6,7 @@ use std::collections::HashSet;
 use std::fmt::Write;

 use camino::Utf8PathBuf;
+use itertools::Itertools;

 use crate::action::BuildAction;
 use crate::archives::Platform;

@@ -266,11 +267,13 @@ impl BuildStatement<'_> {
     /// `existing_outputs`, and any subgroups.
     fn render_into(mut self, buf: &mut String) -> (Vec<String>, Vec<(String, Vec<String>)>) {
         let action_name = self.rule_name;
+        self.implicit_inputs.sort();
+        self.implicit_outputs.sort();
         let inputs_str = to_ninja_target_string(&self.explicit_inputs, &self.implicit_inputs);
         let outputs_str = to_ninja_target_string(&self.explicit_outputs, &self.implicit_outputs);

         writeln!(buf, "build {outputs_str}: {action_name} {inputs_str}").unwrap();
-        for (key, value) in self.variables {
+        for (key, value) in self.variables.iter().sorted() {
             writeln!(buf, " {key} = {}", value).unwrap();
         }
         writeln!(buf).unwrap();

@@ -2,7 +2,11 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 use std::fmt::Write;
-use std::fs::read_to_string;

+use anki_io::create_dir_all;
+use anki_io::write_file_if_changed;
+use anyhow::Result;
+use itertools::Itertools;
+
 use crate::archives::with_exe;
 use crate::input::space_separated;

@@ -18,7 +22,6 @@ impl Build {
         )
         .unwrap();

-        writeln!(&mut buf, "builddir = {}", self.buildroot.as_str()).unwrap();
         writeln!(
             &mut buf,
             "runner = $builddir/rust/debug/{}",

@@ -26,11 +29,6 @@ impl Build {
         )
         .unwrap();

-        for (key, value) in &self.variables {
-            writeln!(&mut buf, "{} = {}", key, value).unwrap();
-        }
-        buf.push('\n');
-
         for (key, value) in &self.pools {
             writeln!(&mut buf, "pool {}\n depth = {}", key, value).unwrap();
         }

@@ -38,7 +36,7 @@ impl Build {

         buf.push_str(&self.output_text);

-        for (group, targets) in &self.groups {
+        for (group, targets) in self.groups.iter().sorted() {
             let group = group.replace(':', "_");
             writeln!(
                 &mut buf,

@@ -51,20 +49,22 @@ impl Build {

         buf.push_str(&self.trailing_text);

+        buf = buf.replace("$builddir", self.buildroot.as_str());
+        for (key, value) in &self.variables {
+            buf = buf.replace(
+                &format!("${key}"),
+                &value.replace("$builddir", self.buildroot.as_str()),
+            );
+        }
+
         buf
     }

-    pub fn write_build_file(&self) {
-        let existing_contents = read_to_string("build.ninja").unwrap_or_default();
-        let new_contents = self.render();
-        if existing_contents != new_contents {
-            let folder = &self.buildroot;
-            if !folder.exists() {
-                std::fs::create_dir_all(folder).expect("create build dir");
-            }
-            std::fs::write(folder.join("build.ninja"), new_contents).expect("write build.ninja");
-        }
-
-        // dbg!(&self.groups);
-    }
+    pub fn write_build_file(&self) -> Result<()> {
+        create_dir_all(&self.buildroot)?;
+        let path = self.buildroot.join("build.ninja");
+        let contents = self.render().into_bytes();
+        write_file_if_changed(path, contents)?;
+        Ok(())
+    }
 }

@@ -16,4 +16,5 @@ clap = { version = "4.2.1", features = ["derive"] }
 itertools = "0.10.5"
+junction = "1.0.0"
 termcolor = "1.2.0"
 which = "4.4.0"
 workspace-hack = { version = "0.1", path = "../../tools/workspace-hack" }

@@ -56,7 +56,7 @@ pub fn run_build(args: BuildArgs) {
     let ninja_args = args.args.into_iter().map(|a| a.replace(':', "_"));

     let start_time = Instant::now();
-    let mut command = Command::new("ninja");
+    let mut command = Command::new(get_ninja_command());
     command
         .arg("-f")
         .arg(&build_file)

@@ -91,7 +91,12 @@ pub fn run_build(args: BuildArgs) {
         stdout
             .set_color(ColorSpec::new().set_fg(Some(Color::Green)).set_bold(true))
             .unwrap();
-        writeln!(&mut stdout, "\nBuild succeeded.").unwrap();
+        writeln!(
+            &mut stdout,
+            "\nBuild succeeded in {:.2}s.",
+            start_time.elapsed().as_secs_f32()
+        )
+        .unwrap();
         stdout.reset().unwrap();
     } else {
         stdout

@@ -104,6 +109,14 @@ pub fn run_build(args: BuildArgs) {
     }
 }

+fn get_ninja_command() -> &'static str {
+    if which::which("n2").is_ok() {
+        "n2"
+    } else {
+        "ninja"
+    }
+}
+
 fn setup_build_root() -> &'static Utf8Path {
     let build_root = Utf8Path::new("out");

@@ -21,6 +21,8 @@ unic-langid = { version = "0.9.1", features = ["macros"] }
 serde = { version = "1.0.159", features = ["derive"] }
 serde_json = "1.0.95"
 inflections = "1.1.1"
+anki_io = { version = "0.0.0", path = "../io" }
+anyhow = "1.0.71"

 [dependencies]
 fluent = "0.16.0"

@@ -6,8 +6,8 @@ mod extract;
 mod gather;
 mod write_strings;

-use std::fs;
-
+use anki_io::write_file_if_changed;
+use anyhow::Result;
 use check::check;
 use extract::get_modules;
 use gather::get_ftl_data;

@@ -15,7 +15,7 @@ use write_strings::write_strings;

 // fixme: check all variables are present in translations as well?

-fn main() {
+fn main() -> Result<()> {
     // generate our own requirements
     let map = get_ftl_data();
     check(&map);

@@ -26,6 +26,7 @@ fn main() {
     println!("cargo:rerun-if-env-changed=STRINGS_JSON");
     if let Some(path) = option_env!("STRINGS_JSON") {
         let meta_json = serde_json::to_string_pretty(&modules).unwrap();
-        fs::write(path, meta_json).unwrap();
+        write_file_if_changed(path, meta_json)?;
     }
+    Ok(())
 }

@@ -199,6 +199,20 @@ impl Iterator for ReadDirFiles {
     }
 }

+pub fn write_file_if_changed(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result<()> {
+    let path = path.as_ref();
+    let contents = contents.as_ref();
+    let changed = {
+        read_file(path)
+            .map(|existing| existing != contents)
+            .unwrap_or(true)
+    };
+    if changed {
+        write_file(path, contents)?;
+    }
+    Ok(())
+}
+
 #[cfg(test)]
 mod test {
     use super::*;

tools/install-n2 (new executable file): 3 changes

@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cargo install --git https://github.com/evmar/n2.git --rev e73378d693716715be1a420aa74a2836b49b85c8