Switch Rust import style (#2330)

* Prepare to switch Rust import style

* Run nightly format

Closes #2320

* Clean up a few imports

* Enable comment wrapping

* Wrap comments
Damien Elmes 2023-01-18 21:39:55 +10:00 committed by GitHub
parent 9d84f357b6
commit ded805b504
311 changed files with 3410 additions and 2687 deletions


@ -1,8 +1,7 @@
# this is not supported on stable Rust, and is ignored by the Bazel rules; it is only
# useful for manual invocation with 'cargo +nightly fmt'
imports_granularity = "Crate"
# These settings are not supported on stable Rust, and are ignored by the ninja
# build script - to use them you need to run 'cargo +nightly fmt'
group_imports = "StdExternalCrate"
# wrap_comments = true
# imports_granularity = "Item"
# imports_layout = "Vertical"
imports_granularity = "Item"
imports_layout = "Vertical"
wrap_comments = true
ignore = ["ascii_percent_encoding"]
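
For reference, a minimal before/after sketch of what these settings do once 'cargo +nightly fmt' is run (not part of the commit; the lines are drawn from hunks later in this diff):

// Before: imports_granularity = "Crate" packed items into nested, single-line groups.
use std::{error::Error, fs, io::Read};

// After: imports_granularity = "Item" plus imports_layout = "Vertical" emit one
// `use` item per line, and group_imports = "StdExternalCrate" separates the
// std, external-crate and crate-local groups with blank lines.
use std::error::Error;
use std::fs;
use std::io::Read;

use camino::Utf8Path;

use crate::paths::absolute_msys_path;

// wrap_comments = true re-flows over-long comments, e.g.
// "if the output folder contains a single folder (eg foo-1.2), move it up a
// level"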


@ -1,7 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{error::Error, fs, io::Read};
use std::error::Error;
use std::fs;
use std::io::Read;
use camino::Utf8Path;
use sha2::Digest;
@ -117,7 +119,8 @@ fn extract(archive_path: &str, output_folder: &str) -> Result<()> {
archive.extract(&output_tmp)?;
}
}
// if the output folder contains a single folder (eg foo-1.2), move it up a level
// if the output folder contains a single folder (eg foo-1.2), move it up a
// level
let mut entries: Vec<_> = output_tmp.read_dir_utf8()?.take(2).collect();
let first_entry = entries.pop().unwrap()?;
if entries.is_empty() && first_entry.metadata()?.is_dir() {


@ -1,21 +1,27 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use ninja_gen::{
action::BuildAction,
command::RunCommand,
copy::{CopyFile, CopyFiles},
glob, hashmap, inputs,
node::{CompileSass, EsbuildScript, TypescriptCheck},
python::{python_format, PythonTest},
Build, Result, Utf8Path, Utf8PathBuf,
};
use ninja_gen::action::BuildAction;
use ninja_gen::command::RunCommand;
use ninja_gen::copy::CopyFile;
use ninja_gen::copy::CopyFiles;
use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::inputs;
use ninja_gen::node::CompileSass;
use ninja_gen::node::EsbuildScript;
use ninja_gen::node::TypescriptCheck;
use ninja_gen::python::python_format;
use ninja_gen::python::PythonTest;
use ninja_gen::Build;
use ninja_gen::Result;
use ninja_gen::Utf8Path;
use ninja_gen::Utf8PathBuf;
use crate::{
anki_version,
python::BuildWheel,
web::{copy_mathjax, eslint},
};
use crate::anki_version;
use crate::python::BuildWheel;
use crate::web::copy_mathjax;
use crate::web::eslint;
pub fn build_and_check_aqt(build: &mut Build) -> Result<()> {
build_forms(build)?;
@ -294,7 +300,8 @@ impl BuildAction for BuildThemedIcon<'_> {
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
let stem = self.src_icon.file_stem().unwrap();
// eg foo-light.svg, foo-dark.svg, foo-FG_SUBTLE-light.svg, foo-FG_SUBTLE-dark.svg
// eg foo-light.svg, foo-dark.svg, foo-FG_SUBTLE-light.svg,
// foo-FG_SUBTLE-dark.svg
let outputs: Vec<_> = self
.colors
.iter()


@ -1,22 +1,25 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use ninja_gen::{
action::BuildAction,
archives::{download_and_extract, empty_manifest, with_exe, OnlineArchive},
cargo::{CargoBuild, RustOutput},
git::SyncSubmodule,
glob,
input::BuildInput,
inputs,
python::PythonEnvironment,
Build, Result, Utf8Path,
};
use ninja_gen::action::BuildAction;
use ninja_gen::archives::download_and_extract;
use ninja_gen::archives::empty_manifest;
use ninja_gen::archives::with_exe;
use ninja_gen::archives::OnlineArchive;
use ninja_gen::cargo::CargoBuild;
use ninja_gen::cargo::RustOutput;
use ninja_gen::git::SyncSubmodule;
use ninja_gen::glob;
use ninja_gen::input::BuildInput;
use ninja_gen::inputs;
use ninja_gen::python::PythonEnvironment;
use ninja_gen::Build;
use ninja_gen::Result;
use ninja_gen::Utf8Path;
use crate::{
anki_version,
platform::{overriden_python_target_platform, overriden_rust_target_triple},
};
use crate::anki_version;
use crate::platform::overriden_python_target_platform;
use crate::platform::overriden_rust_target_triple;
#[derive(Debug, PartialEq, Eq)]
enum DistKind {


@ -12,11 +12,18 @@ mod web;
use aqt::build_and_check_aqt;
use bundle::build_bundle;
use ninja_gen::{Build, Result};
use pylib::{build_pylib, check_pylib};
use python::{check_copyright, check_python, setup_python, setup_venv};
use rust::{build_rust, check_rust};
use web::{build_and_check_web, check_sql};
use ninja_gen::Build;
use ninja_gen::Result;
use pylib::build_pylib;
use pylib::check_pylib;
use python::check_copyright;
use python::check_python;
use python::setup_python;
use python::setup_venv;
use rust::build_rust;
use rust::check_rust;
use web::build_and_check_web;
use web::check_sql;
use crate::proto::check_proto;


@ -5,14 +5,14 @@ use std::env;
use ninja_gen::archives::Platform;
/// Usually None to use the host architecture; can be overriden by setting MAC_X86
/// to build for x86_64 on Apple Silicon
/// Usually None to use the host architecture; can be overriden by setting
/// MAC_X86 to build for x86_64 on Apple Silicon
pub fn overriden_rust_target_triple() -> Option<&'static str> {
overriden_python_target_platform().map(|p| p.as_rust_triple())
}
/// Usually None to use the host architecture; can be overriden by setting MAC_X86
/// to build for x86_64 on Apple Silicon
/// Usually None to use the host architecture; can be overriden by setting
/// MAC_X86 to build for x86_64 on Apple Silicon
pub fn overriden_python_target_platform() -> Option<Platform> {
if env::var("MAC_X86").is_ok() {
Some(Platform::MacX64)


@ -1,12 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use ninja_gen::{
archives::{download_and_extract, with_exe},
glob, hashmap, inputs,
protobuf::{protoc_archive, ClangFormat},
Build, Result,
};
use ninja_gen::archives::download_and_extract;
use ninja_gen::archives::with_exe;
use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::inputs;
use ninja_gen::protobuf::protoc_archive;
use ninja_gen::protobuf::ClangFormat;
use ninja_gen::Build;
use ninja_gen::Result;
pub fn download_protoc(build: &mut Build) -> Result<()> {
download_and_extract(


@ -1,20 +1,21 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use ninja_gen::{
action::BuildAction,
archives::Platform,
command::RunCommand,
copy::LinkFile,
glob, hashmap, inputs,
python::{python_format, PythonTest},
Build, Result,
};
use ninja_gen::action::BuildAction;
use ninja_gen::archives::Platform;
use ninja_gen::command::RunCommand;
use ninja_gen::copy::LinkFile;
use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::inputs;
use ninja_gen::python::python_format;
use ninja_gen::python::PythonTest;
use ninja_gen::Build;
use ninja_gen::Result;
use crate::{
platform::overriden_python_target_platform,
python::{BuildWheel, GenPythonProto},
};
use crate::platform::overriden_python_target_platform;
use crate::python::BuildWheel;
use crate::python::GenPythonProto;
pub fn build_pylib(build: &mut Build) -> Result<()> {
// generated files


@ -3,18 +3,24 @@
use std::env;
use ninja_gen::{
action::BuildAction,
archives::{download_and_extract, OnlineArchive, Platform},
build::FilesHandle,
command::RunCommand,
glob, hashmap,
input::BuildInput,
inputs,
python::{python_format, PythonEnvironment, PythonLint, PythonTypecheck},
rsync::RsyncFiles,
Build, Result, Utf8Path,
};
use ninja_gen::action::BuildAction;
use ninja_gen::archives::download_and_extract;
use ninja_gen::archives::OnlineArchive;
use ninja_gen::archives::Platform;
use ninja_gen::build::FilesHandle;
use ninja_gen::command::RunCommand;
use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::input::BuildInput;
use ninja_gen::inputs;
use ninja_gen::python::python_format;
use ninja_gen::python::PythonEnvironment;
use ninja_gen::python::PythonLint;
use ninja_gen::python::PythonTypecheck;
use ninja_gen::rsync::RsyncFiles;
use ninja_gen::Build;
use ninja_gen::Result;
use ninja_gen::Utf8Path;
fn python_archive(platform: Platform) -> OnlineArchive {
match platform {
@ -252,8 +258,8 @@ pub fn check_python(build: &mut Build) -> Result<()> {
fn add_pylint(build: &mut Build) -> Result<()> {
// pylint does not support PEP420 implicit namespaces split across import paths,
// so we need to merge our pylib sources and generated files before invoking it, and
// add a top-level __init__.py
// so we need to merge our pylib sources and generated files before invoking it,
// and add a top-level __init__.py
build.add(
"pylint/anki",
RsyncFiles {


@ -1,13 +1,20 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use ninja_gen::{
cargo::{CargoBuild, CargoClippy, CargoFormat, CargoRun, CargoTest, RustOutput},
git::SyncSubmodule,
glob, inputs, Build, Result,
};
use ninja_gen::cargo::CargoBuild;
use ninja_gen::cargo::CargoClippy;
use ninja_gen::cargo::CargoFormat;
use ninja_gen::cargo::CargoRun;
use ninja_gen::cargo::CargoTest;
use ninja_gen::cargo::RustOutput;
use ninja_gen::git::SyncSubmodule;
use ninja_gen::glob;
use ninja_gen::inputs;
use ninja_gen::Build;
use ninja_gen::Result;
use crate::{platform::overriden_rust_target_triple, proto::download_protoc};
use crate::platform::overriden_rust_target_triple;
use crate::proto::download_protoc;
pub fn build_rust(build: &mut Build) -> Result<()> {
prepare_translations(build)?;


@ -2,19 +2,25 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
// use super::*;
use ninja_gen::{
action::BuildAction,
command::RunCommand,
glob, hashmap,
input::BuildInput,
inputs,
node::{
node_archive, CompileSass, DPrint, EsbuildScript, Eslint, GenTypescriptProto, JestTest,
SqlFormat, SvelteCheck, TypescriptCheck,
},
rsync::RsyncFiles,
Build, Result,
};
use ninja_gen::action::BuildAction;
use ninja_gen::command::RunCommand;
use ninja_gen::glob;
use ninja_gen::hashmap;
use ninja_gen::input::BuildInput;
use ninja_gen::inputs;
use ninja_gen::node::node_archive;
use ninja_gen::node::CompileSass;
use ninja_gen::node::DPrint;
use ninja_gen::node::EsbuildScript;
use ninja_gen::node::Eslint;
use ninja_gen::node::GenTypescriptProto;
use ninja_gen::node::JestTest;
use ninja_gen::node::SqlFormat;
use ninja_gen::node::SvelteCheck;
use ninja_gen::node::TypescriptCheck;
use ninja_gen::rsync::RsyncFiles;
use ninja_gen::Build;
use ninja_gen::Result;
pub fn build_and_check_web(build: &mut Build) -> Result<()> {
setup_node(build)?;


@ -1,7 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{build::FilesHandle, Build, Result};
use crate::build::FilesHandle;
use crate::Build;
use crate::Result;
pub trait BuildAction {
/// Command line to invoke for each build statement.
@ -10,8 +12,9 @@ pub trait BuildAction {
/// Declare the input files and variables, and output files.
fn files(&mut self, build: &mut impl FilesHandle);
/// If true, this action will not trigger a rebuild of dependent targets if the output
/// files are unchanged. This corresponds to Ninja's "restat" argument.
/// If true, this action will not trigger a rebuild of dependent targets if
/// the output files are unchanged. This corresponds to Ninja's "restat"
/// argument.
fn check_output_timestamps(&self) -> bool {
false
}
@ -21,8 +24,8 @@ pub trait BuildAction {
false
}
/// Called on first action invocation; can be used to inject other build actions
/// to perform initial setup.
/// Called on first action invocation; can be used to inject other build
/// actions to perform initial setup.
#[allow(unused_variables)]
fn on_first_instance(&self, build: &mut Build) -> Result<()> {
Ok(())


@ -1,17 +1,20 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{borrow::Cow, collections::HashMap};
use std::borrow::Cow;
use std::collections::HashMap;
use camino::{Utf8Path, Utf8PathBuf};
use camino::Utf8Path;
use camino::Utf8PathBuf;
use crate::{
action::BuildAction,
cargo::{CargoBuild, RustOutput},
glob,
input::BuildInput,
inputs, Build, Result,
};
use crate::action::BuildAction;
use crate::cargo::CargoBuild;
use crate::cargo::RustOutput;
use crate::glob;
use crate::input::BuildInput;
use crate::inputs;
use crate::Build;
use crate::Result;
#[derive(Clone, Copy, Debug)]
pub struct OnlineArchive {
@ -98,12 +101,14 @@ impl BuildAction for DownloadArchive {
struct ExtractArchive<'a, I> {
pub archive_path: BuildInput,
/// The folder that the archive should be extracted into, relative to $builddir/extracted.
/// If the archive contains a single top-level folder, its contents will be extracted into the
/// provided folder, so that output like tool-1.2/ can be extracted into tool/.
/// The folder that the archive should be extracted into, relative to
/// $builddir/extracted. If the archive contains a single top-level
/// folder, its contents will be extracted into the provided folder, so
/// that output like tool-1.2/ can be extracted into tool/.
pub extraction_folder_name: &'a str,
/// Files contained inside the archive, relative to the archive root, and excluding the top-level
/// folder if it is the sole top-level entry. Any files you wish to use as part of subsequent rules
/// Files contained inside the archive, relative to the archive root, and
/// excluding the top-level folder if it is the sole top-level entry.
/// Any files you wish to use as part of subsequent rules
/// must be declared here.
pub file_manifest: HashMap<&'static str, I>,
}


@ -1,20 +1,18 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{
collections::{HashMap, HashSet},
fmt::Write,
};
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Write;
use camino::Utf8PathBuf;
use crate::{
action::BuildAction,
archives::Platform,
configure::ConfigureBuild,
input::{space_separated, BuildInput},
Result,
};
use crate::action::BuildAction;
use crate::archives::Platform;
use crate::configure::ConfigureBuild;
use crate::input::space_separated;
use crate::input::BuildInput;
use crate::Result;
#[derive(Debug)]
pub struct Build {
@ -64,8 +62,8 @@ impl Build {
self.pools.push((name, size));
}
/// Evaluate the provided closure only once, using `key` to determine uniqueness.
/// This key should not match any build action name.
/// Evaluate the provided closure only once, using `key` to determine
/// uniqueness. This key should not match any build action name.
pub fn once_only(
&mut self,
key: &'static str,
@ -155,15 +153,16 @@ rule {action_name}
self.add_resolved_files_to_group(target_group, &additional_files.clone())
}
/// Outputs from a given build statement group. An error if no files have been registered yet.
/// Outputs from a given build statement group. An error if no files have
/// been registered yet.
pub fn group_outputs(&self, group_name: &'static str) -> &[String] {
self.groups
.get(group_name)
.unwrap_or_else(|| panic!("expected files in {group_name}"))
}
/// Single output from a given build statement group. An error if no files have been registered yet,
/// or more than one file has been registered.
/// Single output from a given build statement group. An error if no files
/// have been registered yet, or more than one file has been registered.
pub fn group_output(&self, group_name: &'static str) -> String {
let outputs = self.group_outputs(group_name);
assert_eq!(outputs.len(), 1);
@ -201,8 +200,8 @@ fn split_groups(group: &str) -> Vec<&str> {
}
struct BuildStatement<'a> {
/// Cache of outputs by already-evaluated build rules, allowing later rules to more easily consume
/// the outputs of previous rules.
/// Cache of outputs by already-evaluated build rules, allowing later rules
/// to more easily consume the outputs of previous rules.
existing_outputs: &'a HashMap<String, Vec<String>>,
rule_name: &'static str,
// implicit refers to files that are not automatically assigned to $in and $out by Ninja,
@ -260,8 +259,8 @@ impl BuildStatement<'_> {
stmt
}
/// Returns a list of all output files, which `Build` will add to `existing_outputs`,
/// and any subgroups.
/// Returns a list of all output files, which `Build` will add to
/// `existing_outputs`, and any subgroups.
fn render_into(mut self, buf: &mut String) -> (Vec<String>, Vec<(String, Vec<String>)>) {
let action_name = self.rule_name;
let inputs_str = to_ninja_target_string(&self.explicit_inputs, &self.implicit_inputs);
@ -344,8 +343,9 @@ pub trait FilesHandle {
/// created so the file list can be accessed in the command. By convention,
/// this is often `out`.
/// - If subgroup is true, the files are also placed in a subgroup. Eg
/// if a rule `foo` exists and subgroup `bar` is provided, the files are accessible
/// via `:foo:bar`. The variable name must not be empty, or called `out`.
/// if a rule `foo` exists and subgroup `bar` is provided, the files are
/// accessible via `:foo:bar`. The variable name must not be empty, or
/// called `out`.
fn add_outputs_ext(
&mut self,
variable: impl Into<String>,
@ -357,8 +357,8 @@ pub trait FilesHandle {
fn add_output_stamp(&mut self, path: impl Into<String>);
/// Set an env var for the duration of the provided command(s).
/// Note this is defined once for the rule, so if the value should change
/// for each command, `constant_value` should reference a `$variable` you have
/// defined.
/// for each command, `constant_value` should reference a `$variable` you
/// have defined.
fn add_env_var(&mut self, key: &str, constant_value: &str);
fn release_build(&self) -> bool;


@ -1,12 +1,16 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use camino::{Utf8Path, Utf8PathBuf};
use camino::Utf8Path;
use camino::Utf8PathBuf;
use crate::{
action::BuildAction, archives::with_exe, build::FilesHandle, input::BuildInput, inputs, Build,
Result,
};
use crate::action::BuildAction;
use crate::archives::with_exe;
use crate::build::FilesHandle;
use crate::input::BuildInput;
use crate::inputs;
use crate::Build;
use crate::Result;
#[derive(Debug, PartialEq, Eq)]
pub enum RustOutput<'a> {
@ -191,8 +195,8 @@ impl BuildAction for CargoFormat {
}
}
/// Use Cargo to download and build a Rust binary. If `binary_name` is `foo`, a `$foo` variable
/// will be defined with the path to the binary.
/// Use Cargo to download and build a Rust binary. If `binary_name` is `foo`, a
/// `$foo` variable will be defined with the path to the binary.
pub struct CargoInstall {
pub binary_name: &'static str,
/// eg 'foo --version 1.3' or '--git git://...'


@ -3,11 +3,10 @@
use std::collections::HashMap;
use crate::{
action::BuildAction,
input::{space_separated, BuildInput},
inputs,
};
use crate::action::BuildAction;
use crate::input::space_separated;
use crate::input::BuildInput;
use crate::inputs;
pub struct RunCommand<'a> {
// Will be automatically included as a dependency
@ -24,8 +23,9 @@ impl BuildAction for RunCommand<'_> {
}
fn files(&mut self, build: &mut impl crate::build::FilesHandle) {
// Because we've defined a generic rule instead of making one for a specific use case,
// we need to manually intepolate variables in the user-provided args.
// Because we've defined a generic rule instead of making one for a specific use
// case, we need to manually intepolate variables in the user-provided
// args.
let mut args = self.args.to_string();
for (key, inputs) in &self.inputs {
let files = build.expand_inputs(inputs);


@ -1,12 +1,14 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
action::BuildAction,
build::FilesHandle,
cargo::{CargoBuild, RustOutput},
glob, inputs, Build, Result,
};
use crate::action::BuildAction;
use crate::build::FilesHandle;
use crate::cargo::CargoBuild;
use crate::cargo::RustOutput;
use crate::glob;
use crate::inputs;
use crate::Build;
use crate::Result;
pub struct ConfigureBuild {}


@ -3,14 +3,16 @@
use camino::Utf8Path;
use crate::{action::BuildAction, input::BuildInput};
use crate::action::BuildAction;
use crate::input::BuildInput;
/// Copy the provided files into the specified destination folder.
/// Directory structure is not preserved - eg foo/bar.js is copied
/// into out/$output_folder/bar.js.
pub struct CopyFiles<'a> {
pub inputs: BuildInput,
/// The folder (relative to the build folder) that files should be copied into.
/// The folder (relative to the build folder) that files should be copied
/// into.
pub output_folder: &'a str,
}
@ -51,8 +53,9 @@ impl BuildAction for CopyFile<'_> {
}
}
/// Create a symbolic link to the provided output path, which should be relative to
/// the output folder. This can be used to create a copy with a different name.
/// Create a symbolic link to the provided output path, which should be relative
/// to the output folder. This can be used to create a copy with a different
/// name.
pub struct LinkFile<'a> {
pub input: BuildInput,
pub output: &'a str,


@ -4,7 +4,8 @@
use itertools::Itertools;
use super::*;
use crate::{action::BuildAction, input::BuildInput};
use crate::action::BuildAction;
use crate::input::BuildInput;
pub struct SyncSubmodule {
pub path: &'static str,


@ -1,10 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use std::collections::hash_map::DefaultHasher;
use std::hash::Hash;
use std::hash::Hasher;
pub fn simple_hash(hashable: impl Hash) -> u64 {
let mut hasher = DefaultHasher::new();


@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{collections::HashMap, fmt::Display};
use std::collections::HashMap;
use std::fmt::Display;
use camino::Utf8PathBuf;
@ -126,7 +127,8 @@ lazy_static::lazy_static! {
static ref CACHED_FILES: Vec<Utf8PathBuf> = cache_files();
}
/// Walking the source tree once instead of for each glob yields ~4x speed improvements.
/// Walking the source tree once instead of for each glob yields ~4x speed
/// improvements.
fn cache_files() -> Vec<Utf8PathBuf> {
walkdir::WalkDir::new(".")
// ensure the output order is predictable


@ -19,7 +19,8 @@ pub mod rsync;
pub mod sass;
pub use build::Build;
pub use camino::{Utf8Path, Utf8PathBuf};
pub use camino::Utf8Path;
pub use camino::Utf8PathBuf;
pub use maplit::hashmap;
pub use which::which;


@ -1,15 +1,17 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{borrow::Cow, collections::HashMap};
use std::borrow::Cow;
use std::collections::HashMap;
use super::*;
use crate::{
action::BuildAction,
archives::{download_and_extract, OnlineArchive, Platform},
hash::simple_hash,
input::{space_separated, BuildInput},
};
use crate::action::BuildAction;
use crate::archives::download_and_extract;
use crate::archives::OnlineArchive;
use crate::archives::Platform;
use crate::hash::simple_hash;
use crate::input::space_separated;
use crate::input::BuildInput;
pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform {


@ -3,13 +3,14 @@
use maplit::hashmap;
use crate::{
action::BuildAction,
archives::{download_and_extract, with_exe, OnlineArchive, Platform},
hash::simple_hash,
input::BuildInput,
inputs,
};
use crate::action::BuildAction;
use crate::archives::download_and_extract;
use crate::archives::with_exe;
use crate::archives::OnlineArchive;
use crate::archives::Platform;
use crate::hash::simple_hash;
use crate::input::BuildInput;
use crate::inputs;
pub fn protoc_archive(platform: Platform) -> OnlineArchive {
match platform {


@ -1,7 +1,12 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{action::BuildAction, hash::simple_hash, input::BuildInput, inputs, Build, Result};
use crate::action::BuildAction;
use crate::hash::simple_hash;
use crate::input::BuildInput;
use crate::inputs;
use crate::Build;
use crate::Result;
pub struct PythonEnvironment<'a> {
pub folder: &'static str,


@ -1,9 +1,12 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{fmt::Write, fs::read_to_string};
use std::fmt::Write;
use std::fs::read_to_string;
use crate::{archives::with_exe, input::space_separated, Build};
use crate::archives::with_exe;
use crate::input::space_separated;
use crate::Build;
impl Build {
pub fn render(&self) -> String {


@ -3,16 +3,16 @@
use camino::Utf8Path;
use crate::{
action::BuildAction,
build::FilesHandle,
input::{space_separated, BuildInput},
};
use crate::action::BuildAction;
use crate::build::FilesHandle;
use crate::input::space_separated;
use crate::input::BuildInput;
/// Rsync the provided inputs into `output_folder`, preserving directory structure,
/// eg foo/bar.js -> out/$target_folder/foo/bar.js. `strip_prefix` can be used to
/// remove a portion of the the path when copying. If the input files are from previous
/// build outputs, the prefix should begin with `$builddir/`.
/// Rsync the provided inputs into `output_folder`, preserving directory
/// structure, eg foo/bar.js -> out/$target_folder/foo/bar.js. `strip_prefix`
/// can be used to remove a portion of the the path when copying. If the input
/// files are from previous build outputs, the prefix should begin with
/// `$builddir/`.
pub struct RsyncFiles<'a> {
pub inputs: BuildInput,
pub target_folder: &'a str,


@ -1,12 +1,13 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
action::BuildAction,
cargo::CargoInstall,
input::{space_separated, BuildInput},
inputs, Build, Result,
};
use crate::action::BuildAction;
use crate::cargo::CargoInstall;
use crate::input::space_separated;
use crate::input::BuildInput;
use crate::inputs;
use crate::Build;
use crate::Result;
pub struct CompileSassWithGrass {
pub input: BuildInput,


@ -1,11 +1,19 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{env, fs, io::Write, process::Command, time::Instant};
use std::env;
use std::fs;
use std::io::Write;
use std::process::Command;
use std::time::Instant;
use camino::Utf8Path;
use clap::Args;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use termcolor::Color;
use termcolor::ColorChoice;
use termcolor::ColorSpec;
use termcolor::StandardStream;
use termcolor::WriteColor;
#[derive(Args)]
pub struct BuildArgs {
@ -39,7 +47,8 @@ pub fn run_build(args: BuildArgs) {
maybe_reconfigure_build(&build_file, &path);
}
// automatically convert foo:bar references to foo_bar, as Ninja can not represent the former
// automatically convert foo:bar references to foo_bar, as Ninja can not
// represent the former
let ninja_args = args.args.into_iter().map(|a| a.replace(':', "_"));
let start_time = Instant::now();
@ -66,9 +75,10 @@ pub fn run_build(args: BuildArgs) {
// run build
let mut status = command.status().expect("ninja not installed");
if !status.success() && Instant::now().duration_since(start_time).as_secs() < 3 {
// if the build fails quickly, there's a reasonable chance that build.ninja references
// a file that has been renamed/deleted. We currently don't capture stderr, so we can't
// confirm, but in case that's the case, we regenerate the build.ninja file then try again.
// if the build fails quickly, there's a reasonable chance that build.ninja
// references a file that has been renamed/deleted. We currently don't
// capture stderr, so we can't confirm, but in case that's the case, we
// regenerate the build.ninja file then try again.
bootstrap_build();
status = command.status().expect("ninja missing");
}


@ -1,7 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{env, fs, process::Command};
use std::env;
use std::fs;
use std::process::Command;
use camino::Utf8PathBuf;
use clap::Args;
@ -15,8 +17,8 @@ pub struct BuildArtifactsArgs {
}
pub fn build_artifacts(args: BuildArtifactsArgs) {
// build.rs doesn't declare inputs from venv, so we need to force a rebuild to ensure
// changes to our libs/the venv get included
// build.rs doesn't declare inputs from venv, so we need to force a rebuild to
// ensure changes to our libs/the venv get included
let artifacts = args.bundle_root.join("artifacts");
if artifacts.exists() {
fs::remove_dir_all(&artifacts).unwrap();


@ -1,15 +1,18 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{env, fs, process::Command};
use std::env;
use std::fs;
use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf};
use clap::{Args, ValueEnum};
use camino::Utf8Path;
use camino::Utf8PathBuf;
use clap::Args;
use clap::ValueEnum;
use crate::{
paths::{absolute_msys_path, unix_path},
run::run_silent,
};
use crate::paths::absolute_msys_path;
use crate::paths::unix_path;
use crate::run::run_silent;
#[derive(Clone, Copy, ValueEnum, Debug)]
enum DistKind {
@ -26,8 +29,8 @@ pub struct BuildDistFolderArgs {
pub fn build_dist_folder(args: BuildDistFolderArgs) {
let BuildDistFolderArgs { kind, folder_root } = args;
fs::create_dir_all(&folder_root).unwrap();
// Start with Qt, as it's the largest, and we use --delete to ensure there are no
// stale files in lib/. Skipped on macOS as Qt is handled later.
// Start with Qt, as it's the largest, and we use --delete to ensure there are
// no stale files in lib/. Skipped on macOS as Qt is handled later.
if !cfg!(target_os = "macos") {
copy_qt_from_venv(kind, &folder_root);
}


@ -15,18 +15,23 @@ mod yarn;
use std::error::Error;
use build::{run_build, BuildArgs};
use bundle::{
artifacts::{build_artifacts, BuildArtifactsArgs},
binary::build_bundle_binary,
folder::{build_dist_folder, BuildDistFolderArgs},
};
// use bundle::{build_bundle_binary, build_dist_folder, BuildDistFolderArgs};
use clap::{Parser, Subcommand};
use pyenv::{setup_pyenv, PyenvArgs};
use rsync::{rsync_files, RsyncArgs};
use run::{run_commands, RunArgs};
use yarn::{setup_yarn, YarnArgs};
use build::run_build;
use build::BuildArgs;
use bundle::artifacts::build_artifacts;
use bundle::artifacts::BuildArtifactsArgs;
use bundle::binary::build_bundle_binary;
use bundle::folder::build_dist_folder;
use bundle::folder::BuildDistFolderArgs;
use clap::Parser;
use clap::Subcommand;
use pyenv::setup_pyenv;
use pyenv::PyenvArgs;
use rsync::rsync_files;
use rsync::RsyncArgs;
use run::run_commands;
use run::RunArgs;
use yarn::setup_yarn;
use yarn::YarnArgs;
pub type Result<T, E = Box<dyn Error>> = std::result::Result<T, E>;


@ -3,8 +3,8 @@
use camino::Utf8Path;
/// On Unix, just a normal path. On Windows, c:\foo\bar.txt becomes /c/foo/bar.txt,
/// which msys rsync expects.
/// On Unix, just a normal path. On Windows, c:\foo\bar.txt becomes
/// /c/foo/bar.txt, which msys rsync expects.
pub fn absolute_msys_path(path: &Utf8Path) -> String {
let path = path.canonicalize_utf8().unwrap().into_string();
if !cfg!(windows) {


@ -18,7 +18,8 @@ pub struct PyenvArgs {
venv_args: Vec<String>,
}
/// Set up a venv if one doesn't already exist, and then sync packages with provided requirements file.
/// Set up a venv if one doesn't already exist, and then sync packages with
/// provided requirements file.
pub fn setup_pyenv(args: PyenvArgs) {
let pyenv_folder = Utf8Path::new(&args.pyenv_folder);
@ -35,8 +36,8 @@ pub fn setup_pyenv(args: PyenvArgs) {
);
if cfg!(windows) {
// the first install on Windows throws an error the first time pip is upgraded, so we install
// it twice and swallow the first error
// the first install on Windows throws an error the first time pip is upgraded,
// so we install it twice and swallow the first error
let _output = Command::new(&pyenv_python)
.args(["-m", "pip", "install", "-r", &args.initial_reqs])
.output()


@ -6,7 +6,8 @@ use std::process::Command;
use camino::Utf8Path;
use clap::Args;
use crate::{paths::absolute_msys_path, run::run_silent};
use crate::paths::absolute_msys_path;
use crate::run::run_silent;
#[derive(Args)]
pub struct RsyncArgs {


@ -1,10 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{
io::ErrorKind,
process::{Command, Output},
};
use std::io::ErrorKind;
use std::process::Command;
use std::process::Output;
use clap::Args;


@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{path::Path, process::Command};
use std::path::Path;
use std::process::Command;
use clap::Args;
@ -21,8 +22,8 @@ pub fn setup_yarn(args: YarnArgs) {
std::fs::write(args.stamp, b"").unwrap();
}
/// Unfortunately a lot of the node ecosystem expects the output folder to reside
/// in the repo root, so we need to link in our output folder.
/// Unfortunately a lot of the node ecosystem expects the output folder to
/// reside in the repo root, so we need to link in our output folder.
#[cfg(not(windows))]
fn link_node_modules() {
let target = Path::new("node_modules");
@ -35,9 +36,10 @@ fn link_node_modules() {
}
}
/// Things are more complicated on Windows - having $root/node_modules point to $root/out/node_modules
/// breaks our globs for some reason, so we create the junction in the opposite direction instead.
/// Ninja will have already created some empty folders based on our declared outputs, so we move the
/// Things are more complicated on Windows - having $root/node_modules point to
/// $root/out/node_modules breaks our globs for some reason, so we create the
/// junction in the opposite direction instead. Ninja will have already created
/// some empty folders based on our declared outputs, so we move the
/// created folder into the root.
#[cfg(windows)]
fn link_node_modules() {


@ -8,7 +8,8 @@
use std::process::Command;
use camino::Utf8Path;
use snafu::{prelude::*, Whatever};
use snafu::prelude::*;
use snafu::Whatever;
type Result<T> = std::result::Result<T, Whatever>;


@ -1,14 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki::{
backend::{init_backend, Backend as RustBackend},
log::set_global_logger,
sync::http_server::SimpleServer,
};
use pyo3::{
create_exception, exceptions::PyException, prelude::*, types::PyBytes, wrap_pyfunction,
};
use anki::backend::init_backend;
use anki::backend::Backend as RustBackend;
use anki::log::set_global_logger;
use anki::sync::http_server::SimpleServer;
use pyo3::create_exception;
use pyo3::exceptions::PyException;
use pyo3::prelude::*;
use pyo3::types::PyBytes;
use pyo3::wrap_pyfunction;
#[pyclass(module = "_rsbridge")]
struct Backend {


@ -1,10 +1,13 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{env, process::Command};
use std::env;
use std::process::Command;
use anyhow::{bail, Result};
use camino::{Utf8Path, Utf8PathBuf};
use anyhow::bail;
use anyhow::Result;
use camino::Utf8Path;
use camino::Utf8PathBuf;
const CODESIGN_ARGS: &[&str] = &["-vvvv", "-o", "runtime", "-s", "Developer ID Application:"];


@ -1,10 +1,13 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{fs, process::Command};
use std::fs;
use std::process::Command;
use anyhow::{Context, Result};
use camino::{Utf8Path, Utf8PathBuf};
use anyhow::Context;
use anyhow::Result;
use camino::Utf8Path;
use camino::Utf8PathBuf;
use clap::Args;
use crate::notarize::wait_then_staple_app;


@ -10,14 +10,23 @@ mod codesign;
mod dmg;
mod notarize;
use std::{env, fs, os::unix::prelude::PermissionsExt, process::Command};
use std::env;
use std::fs;
use std::os::unix::prelude::PermissionsExt;
use std::process::Command;
use anyhow::{bail, Result};
use anyhow::bail;
use anyhow::Result;
use apple_bundles::MacOsApplicationBundleBuilder;
use camino::{Utf8Path, Utf8PathBuf};
use clap::{Parser, Subcommand, ValueEnum};
use codesign::{codesign_app, codesign_python_libs};
use dmg::{make_dmgs, BuildDmgsArgs};
use camino::Utf8Path;
use camino::Utf8PathBuf;
use clap::Parser;
use clap::Subcommand;
use clap::ValueEnum;
use codesign::codesign_app;
use codesign::codesign_python_libs;
use dmg::make_dmgs;
use dmg::BuildDmgsArgs;
use notarize::notarize_app;
use plist::Value;
use simple_file_manifest::FileEntry;


@ -1,9 +1,13 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{env, fs, process::Command};
use std::env;
use std::fs;
use std::process::Command;
use anyhow::{bail, Context, Result};
use anyhow::bail;
use anyhow::Context;
use anyhow::Result;
use camino::Utf8Path;
use serde::Deserialize;


@ -1,12 +1,21 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{fs, io::prelude::*, path::Path, process::Command};
use std::fs;
use std::io::prelude::*;
use std::path::Path;
use std::process::Command;
use anyhow::{bail, Context, Result};
use camino::{Utf8Path, Utf8PathBuf};
use anyhow::bail;
use anyhow::Context;
use anyhow::Result;
use camino::Utf8Path;
use camino::Utf8PathBuf;
use clap::Parser;
use tugger_windows_codesign::{CodeSigningCertificate, SigntoolSign, SystemStore, TimestampServer};
use tugger_windows_codesign::CodeSigningCertificate;
use tugger_windows_codesign::SigntoolSign;
use tugger_windows_codesign::SystemStore;
use tugger_windows_codesign::TimestampServer;
use walkdir::WalkDir;
#[derive(Parser)]


@ -2,7 +2,9 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki::card_rendering::anki_tag_benchmark;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
pub fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("anki_tag_parse", |b| b.iter(|| anki_tag_benchmark()));


@ -1,7 +1,9 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{env, fmt::Write, path::PathBuf};
use std::env;
use std::fmt::Write;
use std::path::PathBuf;
struct CustomGenerator {}


@ -1,9 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
//! Check the .ftl files at build time to ensure we don't get runtime load failures.
//! Check the .ftl files at build time to ensure we don't get runtime load
//! failures.
use fluent::{FluentBundle, FluentResource};
use fluent::FluentBundle;
use fluent::FluentResource;
use unic_langid::LanguageIdentifier;
use super::gather::TranslationsByLang;


@ -1,12 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{collections::HashSet, fmt::Write};
use std::collections::HashSet;
use std::fmt::Write;
use fluent_syntax::{
ast::{Entry, Expression, InlineExpression, Pattern, PatternElement},
parser::parse,
};
use fluent_syntax::ast::Entry;
use fluent_syntax::ast::Expression;
use fluent_syntax::ast::InlineExpression;
use fluent_syntax::ast::Pattern;
use fluent_syntax::ast::PatternElement;
use fluent_syntax::parser::parse;
use serde::Serialize;
use crate::gather::TranslationsByLang;
@ -163,7 +166,8 @@ impl Visitor {
impl From<String> for Variable {
fn from(name: String) -> Self {
// rather than adding more items here as we add new strings, we should probably
// try to either reuse existing ones, or consider some sort of Hungarian notation
// try to either reuse existing ones, or consider some sort of Hungarian
// notation
let kind = match name.as_str() {
"cards" | "notes" | "count" | "amount" | "reviews" | "total" | "selected"
| "kilobytes" | "daysStart" | "daysEnd" | "days" | "secs-per-card" | "remaining"


@ -1,14 +1,14 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
//! By default, the Qt translations will be included in rslib. EXTRA_FTL_ROOT can be set
//! to an external folder so the mobile clients can use their own translations instead.
//! By default, the Qt translations will be included in rslib. EXTRA_FTL_ROOT
//! can be set to an external folder so the mobile clients can use their own
//! translations instead.
use std::{
collections::HashMap,
fs,
path::{Path, PathBuf},
};
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
pub type TranslationsByFile = HashMap<String, String>;
pub type TranslationsByLang = HashMap<String, TranslationsByFile>;
@ -26,7 +26,8 @@ pub fn get_ftl_data() -> TranslationsByLang {
if let Some(path) = extra_ftl_root() {
// Mobile client has requested its own extra translations
add_translation_root(&mut map, &path, false);
// In a debug build, also include the Qt translations so that our Python unit tests pass.
// In a debug build, also include the Qt translations so that our Python unit
// tests pass.
if std::env::var("RELEASE").is_err() {
add_folder(&mut map, &ftl_base.join("qt"), "templates");
}


@ -3,14 +3,17 @@
//! Write strings to a strings.rs file that will be compiled into the binary.
use std::{fmt::Write, fs, path::PathBuf};
use std::fmt::Write;
use std::fs;
use std::path::PathBuf;
use inflections::Inflect;
use crate::{
extract::{Module, Translation, VariableKind},
gather::{TranslationsByFile, TranslationsByLang},
};
use crate::extract::Module;
use crate::extract::Translation;
use crate::extract::VariableKind;
use crate::gather::TranslationsByFile;
use crate::gather::TranslationsByLang;
pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
let mut buf = String::new();


@ -3,14 +3,17 @@
mod generated;
use std::{
borrow::Cow,
sync::{Arc, Mutex},
};
use std::borrow::Cow;
use std::sync::Arc;
use std::sync::Mutex;
use fluent::{types::FluentNumber, FluentArgs, FluentResource, FluentValue};
use fluent::types::FluentNumber;
use fluent::FluentArgs;
use fluent::FluentResource;
use fluent::FluentValue;
use fluent_bundle::bundle::FluentBundle as FluentBundleOrig;
use generated::{KEYS_BY_MODULE, STRINGS};
use generated::KEYS_BY_MODULE;
use generated::STRINGS;
use num_format::Locale;
use serde::Serialize;
use unic_langid::LanguageIdentifier;
@ -265,7 +268,8 @@ impl I18n {
key.to_string().into()
}
/// Return text from configured locales for use with the JS Fluent implementation.
/// Return text from configured locales for use with the JS Fluent
/// implementation.
pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
let inner = self.inner.lock().unwrap();
let resources = get_modules(&inner.langs, desired_modules);


@ -1,19 +1,24 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{collections::HashSet, fs, io::BufReader, iter::FromIterator};
use std::collections::HashSet;
use std::fs;
use std::io::BufReader;
use std::iter::FromIterator;
use fluent_syntax::{ast, parser};
use fluent_syntax::ast;
use fluent_syntax::parser;
use lazy_static::lazy_static;
use regex::Regex;
use serde_json;
use walkdir::{DirEntry, WalkDir};
use walkdir::DirEntry;
use walkdir::WalkDir;
use crate::serialize;
/// Extract references from all Rust, Python, TS, Svelte, Swift and Designer files in
/// the `roots`, convert them to kebab case and write them as a json to the
/// target file.
/// Extract references from all Rust, Python, TS, Svelte, Swift and Designer
/// files in the `roots`, convert them to kebab case and write them as a json to
/// the target file.
pub fn extract_ftl_references<S1: AsRef<str>, S2: AsRef<str>>(roots: &[S1], target: S2) {
let mut refs = HashSet::new();
for root in roots {


@ -3,11 +3,12 @@
// copied from https://github.com/projectfluent/fluent-rs/pull/241
use std::fmt::{
Error, Write, {self},
};
use std::fmt;
use std::fmt::Error;
use std::fmt::Write;
use fluent_syntax::{ast::*, parser::Slice};
use fluent_syntax::ast::*;
use fluent_syntax::parser::Slice;
pub fn serialize<'s, S: Slice<'s>>(resource: &Resource<S>) -> String {
serialize_with_options(resource, Options::default())


@ -5,20 +5,22 @@ use anki::links::HelpPage;
#[cfg(test)]
mod test {
use std::{env, iter};
use std::env;
use std::iter;
use futures::StreamExt;
use itertools::Itertools;
use linkcheck::{
validation::{check_web, Context, Reason},
BasicContext,
};
use linkcheck::validation::check_web;
use linkcheck::validation::Context;
use linkcheck::validation::Reason;
use linkcheck::BasicContext;
use reqwest::Url;
use strum::IntoEnumIterator;
use super::*;
/// Aggregates [`Outcome`]s by collecting the error messages of the invalid ones.
/// Aggregates [`Outcome`]s by collecting the error messages of the invalid
/// ones.
#[derive(Default)]
struct Outcomes(Vec<String>);


@ -15,8 +15,9 @@ impl Collection {
///
/// - When 'default to the current deck' is enabled, we use the current deck
/// if it's normal, the provided reviewer card's deck as a fallback, and
/// Default as a final fallback. We then fetch the last used notetype stored
/// in the deck, falling back to the global notetype, or the first available one.
/// Default as a final fallback. We then fetch the last used notetype
/// stored in the deck, falling back to the global notetype, or the first
/// available one.
///
/// - Otherwise, each note type remembers the last deck cards were added to,
/// and we use that, defaulting to the current deck if missing, and
@ -49,7 +50,8 @@ impl Collection {
})
}
/// The currently selected deck, the home deck of the provided card, or the default deck.
/// The currently selected deck, the home deck of the provided card, or the
/// default deck.
fn get_current_deck_for_adding(
&mut self,
home_deck_of_reviewer_card: DeckId,
@ -96,9 +98,10 @@ impl Collection {
}
/// Returns the last deck added to with this notetype, provided it is valid.
/// This is optional due to the inconsistent handling, where changes in notetype
/// may need to update the current deck, but not vice versa. If a previous deck is
/// not set, we want to keep the current selection, instead of resetting it.
/// This is optional due to the inconsistent handling, where changes in
/// notetype may need to update the current deck, but not vice versa. If
/// a previous deck is not set, we want to keep the current selection,
/// instead of resetting it.
pub(crate) fn default_deck_for_notetype(&mut self, ntid: NotetypeId) -> Result<Option<DeckId>> {
if let Some(last_deck_id) = self.get_last_deck_added_to_for_notetype(ntid) {
if let Some(deck) = self.get_deck(last_deck_id)? {


@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{adding::DeckAndNotetype, pb::notes::DeckAndNotetype as DeckAndNotetypeProto};
use crate::adding::DeckAndNotetype;
use crate::pb::notes::DeckAndNotetype as DeckAndNotetypeProto;
impl From<DeckAndNotetype> for DeckAndNotetypeProto {
fn from(s: DeckAndNotetype) -> Self {


@ -1,29 +1,27 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{
collections::HashMap,
mem::size_of,
sync::{
atomic::{AtomicI32, Ordering},
Mutex,
},
};
use std::collections::HashMap;
use std::mem::size_of;
use std::sync::atomic::AtomicI32;
use std::sync::atomic::Ordering;
use std::sync::Mutex;
use itertools::{
FoldWhile,
FoldWhile::{Continue, Done},
Itertools,
};
use itertools::FoldWhile;
use itertools::FoldWhile::Continue;
use itertools::FoldWhile::Done;
use itertools::Itertools;
use lazy_static::lazy_static;
use rusqlite::ToSql;
use serde_derive::Deserialize;
use crate::{
collection::Collection,
error::Result,
pb::ankidroid::{sql_value::Data, DbResponse, DbResult, Row, SqlValue},
};
use crate::collection::Collection;
use crate::error::Result;
use crate::pb::ankidroid::sql_value::Data;
use crate::pb::ankidroid::DbResponse;
use crate::pb::ankidroid::DbResult;
use crate::pb::ankidroid::Row;
use crate::pb::ankidroid::SqlValue;
/// A pointer to the SqliteStorage object stored in a collection, used to
/// uniquely index results from multiple open collections at once.
@ -76,9 +74,10 @@ impl Sizable for Row {
impl Sizable for DbResult {
fn estimate_size(&self) -> usize {
// Performance: It might be best to take the first x rows and determine the data types
// If we have floats or longs, they'll be a fixed size (excluding nulls) and should speed
// up the calculation as we'll only calculate a subset of the columns.
// Performance: It might be best to take the first x rows and determine the data
// types If we have floats or longs, they'll be a fixed size (excluding
// nulls) and should speed up the calculation as we'll only calculate a
// subset of the columns.
self.rows.iter().map(|x| x.estimate_size()).sum()
}
}
@ -96,8 +95,9 @@ fn select_slice_of_size<'a>(
let init: Vec<Row> = Vec::new();
rows.fold_while((0, init), |mut acc, x| {
let new_size = acc.0 + x.estimate_size();
// If the accumulator is 0, but we're over the size: return a single result so we don't loop forever.
// Theoretically, this shouldn't happen as data should be reasonably sized
// If the accumulator is 0, but we're over the size: return a single result so
// we don't loop forever. Theoretically, this shouldn't happen as data
// should be reasonably sized
if new_size > max_size && acc.0 > 0 {
Done(acc)
} else {
@ -147,7 +147,8 @@ pub(crate) fn trim_and_cache_remaining(
) -> DbResponse {
let start_index = 0;
// PERF: Could speed this up by not creating the vector and just calculating the count
// PERF: Could speed this up by not creating the vector and just calculating the
// count
let first_result = select_next_slice(values.rows.iter());
let row_count = values.rows.len() as i32;
@ -279,11 +280,12 @@ pub(crate) fn execute_for_row_count(col: &Collection, req: &[u8]) -> Result<i64>
#[cfg(test)]
mod tests {
use super::*;
use crate::{
backend::ankidroid::db::{select_slice_of_size, Sizable},
collection::open_test_collection,
pb::ankidroid::{sql_value, Row, SqlValue},
};
use crate::backend::ankidroid::db::select_slice_of_size;
use crate::backend::ankidroid::db::Sizable;
use crate::collection::open_test_collection;
use crate::pb::ankidroid::sql_value;
use crate::pb::ankidroid::Row;
use crate::pb::ankidroid::SqlValue;
fn gen_data() -> Vec<SqlValue> {
vec![


@ -1,13 +1,17 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
error::{
DbError, DbErrorKind as DB, FilteredDeckError, InvalidInputError, NetworkError,
NetworkErrorKind as Net, NotFoundError, SearchErrorKind, SyncError, SyncErrorKind as Sync,
},
prelude::AnkiError,
};
use crate::error::DbError;
use crate::error::DbErrorKind as DB;
use crate::error::FilteredDeckError;
use crate::error::InvalidInputError;
use crate::error::NetworkError;
use crate::error::NetworkErrorKind as Net;
use crate::error::NotFoundError;
use crate::error::SearchErrorKind;
use crate::error::SyncError;
use crate::error::SyncErrorKind as Sync;
use crate::prelude::AnkiError;
pub(super) fn debug_produce_error(s: &str) -> AnkiError {
let info = "error_value".to_string();


@ -4,23 +4,25 @@
pub(crate) mod db;
pub(crate) mod error;
use self::{db::active_sequences, error::debug_produce_error};
use super::{
dbproxy::{db_command_bytes, db_command_proto},
Backend,
};
use self::db::active_sequences;
use self::error::debug_produce_error;
use super::dbproxy::db_command_bytes;
use super::dbproxy::db_command_proto;
use super::Backend;
use crate::backend::ankidroid::db::execute_for_row_count;
use crate::backend::ankidroid::db::insert_for_id;
use crate::pb;
pub(super) use crate::pb::ankidroid::ankidroid_service::Service as AnkidroidService;
use crate::{
backend::ankidroid::db::{execute_for_row_count, insert_for_id},
pb,
pb::{
ankidroid::{DbResponse, GetActiveSequenceNumbersResponse, GetNextResultPageRequest},
generic,
generic::{Empty, Int32, Json},
},
prelude::*,
scheduler::{timing, timing::fixed_offset_from_minutes},
};
use crate::pb::ankidroid::DbResponse;
use crate::pb::ankidroid::GetActiveSequenceNumbersResponse;
use crate::pb::ankidroid::GetNextResultPageRequest;
use crate::pb::generic;
use crate::pb::generic::Empty;
use crate::pb::generic::Int32;
use crate::pb::generic::Json;
use crate::prelude::*;
use crate::scheduler::timing;
use crate::scheduler::timing::fixed_offset_from_minutes;
impl AnkidroidService for Backend {
fn sched_timing_today_legacy(


@ -2,12 +2,11 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
use crate::card::CardQueue;
use crate::card::CardType;
use crate::pb;
pub(super) use crate::pb::cards::cards_service::Service as CardsService;
use crate::{
card::{CardQueue, CardType},
pb,
prelude::*,
};
use crate::prelude::*;
impl CardsService for Backend {
fn get_card(&self, input: pb::cards::CardId) -> Result<pb::cards::Card> {


@ -2,21 +2,24 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
use crate::card_rendering::extract_av_tags;
use crate::card_rendering::strip_av_tags;
use crate::latex::extract_latex;
use crate::latex::extract_latex_expanding_clozes;
use crate::latex::ExtractedLatex;
use crate::markdown::render_markdown;
use crate::notetype::CardTemplateSchema11;
use crate::notetype::RenderCardOutput;
use crate::pb;
pub(super) use crate::pb::card_rendering::cardrendering_service::Service as CardRenderingService;
use crate::{
card_rendering::{extract_av_tags, strip_av_tags},
latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex},
markdown::render_markdown,
notetype::{CardTemplateSchema11, RenderCardOutput},
pb,
prelude::*,
template::RenderedNode,
text::{
decode_iri_paths, encode_iri_paths, sanitize_html_no_images, strip_html,
strip_html_preserving_media_filenames,
},
typeanswer::compare_answer,
};
use crate::prelude::*;
use crate::template::RenderedNode;
use crate::text::decode_iri_paths;
use crate::text::encode_iri_paths;
use crate::text::sanitize_html_no_images;
use crate::text::strip_html;
use crate::text::strip_html_preserving_media_filenames;
use crate::typeanswer::compare_answer;
impl CardRenderingService for Backend {
fn extract_av_tags(


@ -5,12 +5,14 @@ use std::sync::MutexGuard;
use tracing::error;
use super::{progress::Progress, Backend};
use super::progress::Progress;
use super::Backend;
use crate::backend::progress::progress_to_proto;
use crate::collection::CollectionBuilder;
use crate::pb;
pub(super) use crate::pb::collection::collection_service::Service as CollectionService;
use crate::{
backend::progress::progress_to_proto, collection::CollectionBuilder, pb, prelude::*,
storage::SchemaVersion,
};
use crate::prelude::*;
use crate::storage::SchemaVersion;
impl CollectionService for Backend {
fn latest_progress(&self, _input: pb::generic::Empty) -> Result<pb::collection::Progress> {


@ -4,13 +4,13 @@
use serde_json::Value;
use super::Backend;
use crate::config::BoolKey;
use crate::config::StringKey;
use crate::pb;
use crate::pb::config::config_key::Bool as BoolKeyProto;
use crate::pb::config::config_key::String as StringKeyProto;
pub(super) use crate::pb::config::config_service::Service as ConfigService;
use crate::{
config::{BoolKey, StringKey},
pb,
pb::config::config_key::{Bool as BoolKeyProto, String as StringKeyProto},
prelude::*,
};
use crate::prelude::*;
impl From<BoolKeyProto> for BoolKey {
fn from(k: BoolKeyProto) -> Self {

View file

@ -1,21 +1,24 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use rusqlite::{
params_from_iter,
types::{FromSql, FromSqlError, ToSql, ToSqlOutput, ValueRef},
OptionalExtension,
};
use serde_derive::{Deserialize, Serialize};
use rusqlite::params_from_iter;
use rusqlite::types::FromSql;
use rusqlite::types::FromSqlError;
use rusqlite::types::ToSql;
use rusqlite::types::ToSqlOutput;
use rusqlite::types::ValueRef;
use rusqlite::OptionalExtension;
use serde_derive::Deserialize;
use serde_derive::Serialize;
use crate::{
pb,
pb::ankidroid::{
sql_value::Data, DbResponse, DbResult as ProtoDbResult, Row, SqlValue as pb_SqlValue,
},
prelude::*,
storage::SqliteStorage,
};
use crate::pb;
use crate::pb::ankidroid::sql_value::Data;
use crate::pb::ankidroid::DbResponse;
use crate::pb::ankidroid::DbResult as ProtoDbResult;
use crate::pb::ankidroid::Row;
use crate::pb::ankidroid::SqlValue as pb_SqlValue;
use crate::prelude::*;
use crate::storage::SqliteStorage;
#[derive(Deserialize)]
#[serde(tag = "kind", rename_all = "lowercase")]

View file

@ -2,12 +2,12 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
use crate::deckconfig::DeckConfSchema11;
use crate::deckconfig::DeckConfig;
use crate::deckconfig::UpdateDeckConfigsRequest;
use crate::pb;
pub(super) use crate::pb::deckconfig::deckconfig_service::Service as DeckConfigService;
use crate::{
deckconfig::{DeckConfSchema11, DeckConfig, UpdateDeckConfigsRequest},
pb,
prelude::*,
};
use crate::prelude::*;
impl DeckConfigService for Backend {
fn add_or_update_deck_config_legacy(

View file

@ -4,13 +4,12 @@
use std::convert::TryFrom;
use super::Backend;
use crate::decks::DeckSchema11;
use crate::decks::FilteredSearchOrder;
use crate::pb;
pub(super) use crate::pb::decks::decks_service::Service as DecksService;
use crate::{
decks::{DeckSchema11, FilteredSearchOrder},
pb,
prelude::*,
scheduler::filtered::FilteredDeckForUpdate,
};
use crate::prelude::*;
use crate::scheduler::filtered::FilteredDeckForUpdate;
impl DecksService for Backend {
fn new_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {

View file

@ -1,12 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
error::{AnkiError, SyncErrorKind},
pb,
pb::backend::backend_error::Kind,
prelude::*,
};
use crate::error::AnkiError;
use crate::error::SyncErrorKind;
use crate::pb;
use crate::pb::backend::backend_error::Kind;
use crate::prelude::*;
impl AnkiError {
pub fn into_protobuf(self, tr: &I18n) -> pb::backend::BackendError {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, prelude::*};
use crate::pb;
use crate::prelude::*;
impl From<Vec<u8>> for pb::generic::Json {
fn from(json: Vec<u8>) -> Self {

View file

@ -3,15 +3,15 @@
use std::collections::HashMap;
use fluent::{FluentArgs, FluentValue};
use fluent::FluentArgs;
use fluent::FluentValue;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::i18n::i18n_service::Service as I18nService;
use crate::{
pb,
prelude::*,
scheduler::timespan::{answer_button_time, time_span},
};
use crate::prelude::*;
use crate::scheduler::timespan::answer_button_time;
use crate::scheduler::timespan::time_span;
impl I18nService for Backend {
fn translate_string(

View file

@ -3,15 +3,18 @@
use std::path::Path;
use super::{progress::Progress, Backend};
use super::progress::Progress;
use super::Backend;
use crate::import_export::package::import_colpkg;
use crate::import_export::ExportProgress;
use crate::import_export::ImportProgress;
use crate::import_export::NoteLog;
use crate::pb;
use crate::pb::import_export::export_limit;
pub(super) use crate::pb::import_export::importexport_service::Service as ImportExportService;
use crate::{
import_export::{package::import_colpkg, ExportProgress, ImportProgress, NoteLog},
pb,
pb::import_export::{export_limit, ExportLimit},
prelude::*,
search::SearchNode,
};
use crate::pb::import_export::ExportLimit;
use crate::prelude::*;
use crate::search::SearchNode;
impl ImportExportService for Backend {
fn export_collection_package(

View file

@ -2,8 +2,10 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
use crate::pb;
use crate::pb::links::help_page_link_request::HelpPage;
pub(super) use crate::pb::links::links_service::Service as LinksService;
use crate::{pb, pb::links::help_page_link_request::HelpPage, prelude::*};
use crate::prelude::*;
impl LinksService for Backend {
fn help_page_link(&self, input: pb::links::HelpPageLinkRequest) -> Result<pb::generic::String> {

View file

@ -1,9 +1,12 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{progress::Progress, Backend};
use super::progress::Progress;
use super::Backend;
use crate::media::check::MediaChecker;
use crate::pb;
pub(super) use crate::pb::media::media_service::Service as MediaService;
use crate::{media::check::MediaChecker, pb, prelude::*};
use crate::prelude::*;
impl MediaService for Backend {
// media

View file

@ -29,39 +29,41 @@ mod stats;
mod sync;
mod tags;
use std::{
result,
sync::{Arc, Mutex},
thread::JoinHandle,
};
use std::result;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread::JoinHandle;
use once_cell::sync::OnceCell;
use progress::AbortHandleSlot;
use prost::Message;
use tokio::{runtime, runtime::Runtime};
use tokio::runtime;
use tokio::runtime::Runtime;
use self::{
ankidroid::AnkidroidService,
card::CardsService,
cardrendering::CardRenderingService,
collection::CollectionService,
config::ConfigService,
deckconfig::DeckConfigService,
decks::DecksService,
i18n::I18nService,
import_export::ImportExportService,
links::LinksService,
media::MediaService,
notes::NotesService,
notetypes::NotetypesService,
progress::ProgressState,
scheduler::SchedulerService,
search::SearchService,
stats::StatsService,
sync::{SyncService, SyncState},
tags::TagsService,
};
use crate::{backend::dbproxy::db_command_bytes, pb, pb::backend::ServiceIndex, prelude::*};
use self::ankidroid::AnkidroidService;
use self::card::CardsService;
use self::cardrendering::CardRenderingService;
use self::collection::CollectionService;
use self::config::ConfigService;
use self::deckconfig::DeckConfigService;
use self::decks::DecksService;
use self::i18n::I18nService;
use self::import_export::ImportExportService;
use self::links::LinksService;
use self::media::MediaService;
use self::notes::NotesService;
use self::notetypes::NotetypesService;
use self::progress::ProgressState;
use self::scheduler::SchedulerService;
use self::search::SearchService;
use self::stats::StatsService;
use self::sync::SyncService;
use self::sync::SyncState;
use self::tags::TagsService;
use crate::backend::dbproxy::db_command_bytes;
use crate::pb;
use crate::pb::backend::ServiceIndex;
use crate::prelude::*;
pub struct Backend {
col: Arc<Mutex<Option<Collection>>>,

View file

@ -4,8 +4,10 @@
use std::collections::HashSet;
use super::Backend;
use crate::cloze::add_cloze_numbers_in_string;
use crate::pb;
pub(super) use crate::pb::notes::notes_service::Service as NotesService;
use crate::{cloze::add_cloze_numbers_in_string, pb, prelude::*};
use crate::prelude::*;
impl NotesService for Backend {
fn new_note(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notes::Note> {

View file

@ -2,15 +2,15 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
use crate::config::get_aux_notetype_config_key;
use crate::notetype::all_stock_notetypes;
use crate::notetype::ChangeNotetypeInput;
use crate::notetype::Notetype;
use crate::notetype::NotetypeChangeInfo;
use crate::notetype::NotetypeSchema11;
use crate::pb;
pub(super) use crate::pb::notetypes::notetypes_service::Service as NotetypesService;
use crate::{
config::get_aux_notetype_config_key,
notetype::{
all_stock_notetypes, ChangeNotetypeInput, Notetype, NotetypeChangeInfo, NotetypeSchema11,
},
pb,
prelude::*,
};
use crate::prelude::*;
impl NotetypesService for Backend {
fn add_notetype(

View file

@ -1,12 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{
ops::OpChanges,
pb,
prelude::*,
undo::{UndoOutput, UndoStatus},
};
use crate::ops::OpChanges;
use crate::pb;
use crate::prelude::*;
use crate::undo::UndoOutput;
use crate::undo::UndoStatus;
impl From<OpChanges> for pb::collection::OpChanges {
fn from(c: OpChanges) -> Self {

View file

@ -1,24 +1,21 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::sync::{Arc, Mutex};
use std::sync::Arc;
use std::sync::Mutex;
use futures::future::AbortHandle;
use super::Backend;
use crate::{
dbcheck::DatabaseCheckProgress,
i18n::I18n,
import_export::{ExportProgress, ImportProgress},
pb,
sync::{
collection::{
normal::NormalSyncProgress,
progress::{FullSyncProgress, SyncStage},
},
media::progress::MediaSyncProgress,
},
};
use crate::dbcheck::DatabaseCheckProgress;
use crate::i18n::I18n;
use crate::import_export::ExportProgress;
use crate::import_export::ImportProgress;
use crate::pb;
use crate::sync::collection::normal::NormalSyncProgress;
use crate::sync::collection::progress::FullSyncProgress;
use crate::sync::collection::progress::SyncStage;
use crate::sync::media::progress::MediaSyncProgress;
pub(super) struct ThrottlingProgressHandler {
pub state: Arc<Mutex<ProgressState>>,

View file

@ -3,14 +3,12 @@
use std::mem;
use crate::{
pb,
prelude::*,
scheduler::{
answering::{CardAnswer, Rating},
queue::{QueuedCard, QueuedCards},
},
};
use crate::pb;
use crate::prelude::*;
use crate::scheduler::answering::CardAnswer;
use crate::scheduler::answering::Rating;
use crate::scheduler::queue::QueuedCard;
use crate::scheduler::queue::QueuedCards;
impl From<pb::scheduler::CardAnswer> for CardAnswer {
fn from(mut answer: pb::scheduler::CardAnswer) -> Self {

View file

@ -5,20 +5,17 @@ mod answering;
mod states;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::scheduler::scheduler_service::Service as SchedulerService;
use crate::{
pb,
prelude::*,
scheduler::{
new::NewCardDueOrder,
states::{CardState, SchedulingStates},
},
stats::studied_today,
};
use crate::prelude::*;
use crate::scheduler::new::NewCardDueOrder;
use crate::scheduler::states::CardState;
use crate::scheduler::states::SchedulingStates;
use crate::stats::studied_today;
impl SchedulerService for Backend {
/// This behaves like _updateCutoff() in older code - it also unburies at the start of
/// a new day.
/// This behaves like _updateCutoff() in older code - it also unburies at
/// the start of a new day.
fn sched_timing_today(
&self,
_input: pb::generic::Empty,

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::FilteredState};
use crate::pb;
use crate::scheduler::states::FilteredState;
impl From<FilteredState> for pb::scheduler::scheduling_state::Filtered {
fn from(state: FilteredState) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::LearnState};
use crate::pb;
use crate::scheduler::states::LearnState;
impl From<pb::scheduler::scheduling_state::Learning> for LearnState {
fn from(state: pb::scheduler::scheduling_state::Learning) -> Self {

View file

@ -10,10 +10,11 @@ mod relearning;
mod rescheduling;
mod review;
use crate::{
pb,
scheduler::states::{CardState, NewState, NormalState, SchedulingStates},
};
use crate::pb;
use crate::scheduler::states::CardState;
use crate::scheduler::states::NewState;
use crate::scheduler::states::NormalState;
use crate::scheduler::states::SchedulingStates;
impl From<SchedulingStates> for pb::scheduler::SchedulingStates {
fn from(choices: SchedulingStates) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::NewState};
use crate::pb;
use crate::scheduler::states::NewState;
impl From<pb::scheduler::scheduling_state::New> for NewState {
fn from(state: pb::scheduler::scheduling_state::New) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::NormalState};
use crate::pb;
use crate::scheduler::states::NormalState;
impl From<NormalState> for pb::scheduler::scheduling_state::Normal {
fn from(state: NormalState) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::PreviewState};
use crate::pb;
use crate::scheduler::states::PreviewState;
impl From<pb::scheduler::scheduling_state::Preview> for PreviewState {
fn from(state: pb::scheduler::scheduling_state::Preview) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::RelearnState};
use crate::pb;
use crate::scheduler::states::RelearnState;
impl From<pb::scheduler::scheduling_state::Relearning> for RelearnState {
fn from(state: pb::scheduler::scheduling_state::Relearning) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::ReschedulingFilterState};
use crate::pb;
use crate::scheduler::states::ReschedulingFilterState;
impl From<pb::scheduler::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
fn from(state: pb::scheduler::scheduling_state::ReschedulingFilter) -> Self {

View file

@ -1,7 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{pb, scheduler::states::ReviewState};
use crate::pb;
use crate::scheduler::states::ReviewState;
impl From<pb::scheduler::scheduling_state::Review> for ReviewState {
fn from(state: pb::scheduler::scheduling_state::Review) -> Self {

View file

@ -3,7 +3,9 @@
use std::str::FromStr;
use crate::{browser_table, i18n::I18n, pb};
use crate::browser_table;
use crate::i18n::I18n;
use crate::pb;
impl browser_table::Column {
pub fn to_pb_column(self, i18n: &I18n) -> pb::search::browser_columns::Column {

View file

@ -4,17 +4,20 @@
mod browser_table;
mod search_node;
use std::{str::FromStr, sync::Arc};
use std::str::FromStr;
use std::sync::Arc;
use super::{notes::to_note_ids, Backend};
use super::notes::to_note_ids;
use super::Backend;
use crate::browser_table::Column;
use crate::pb;
pub(super) use crate::pb::search::search_service::Service as SearchService;
use crate::{
browser_table::Column,
pb,
pb::search::sort_order::Value as SortOrderProto,
prelude::*,
search::{replace_search_node, JoinSearches, Node, SortMode},
};
use crate::pb::search::sort_order::Value as SortOrderProto;
use crate::prelude::*;
use crate::search::replace_search_node;
use crate::search::JoinSearches;
use crate::search::Node;
use crate::search::SortMode;
impl SearchService for Backend {
fn build_search_string(&self, input: pb::search::SearchNode) -> Result<pb::generic::String> {

View file

@ -3,20 +3,26 @@
use itertools::Itertools;
use crate::{
pb,
prelude::*,
search::{
parse_search, Negated, Node, PropertyKind, RatingKind, SearchNode, StateKind, TemplateKind,
},
text::{escape_anki_wildcards, escape_anki_wildcards_for_search_node},
};
use crate::pb;
use crate::prelude::*;
use crate::search::parse_search;
use crate::search::Negated;
use crate::search::Node;
use crate::search::PropertyKind;
use crate::search::RatingKind;
use crate::search::SearchNode;
use crate::search::StateKind;
use crate::search::TemplateKind;
use crate::text::escape_anki_wildcards;
use crate::text::escape_anki_wildcards_for_search_node;
impl TryFrom<pb::search::SearchNode> for Node {
type Error = AnkiError;
fn try_from(msg: pb::search::SearchNode) -> std::result::Result<Self, Self::Error> {
use pb::search::search_node::{group::Joiner, Filter, Flag};
use pb::search::search_node::group::Joiner;
use pb::search::search_node::Filter;
use pb::search::search_node::Flag;
Ok(if let Some(filter) = msg.filter {
match filter {
Filter::Tag(s) => SearchNode::from_tag_name(&s).into(),

View file

@ -2,8 +2,10 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
use crate::pb;
pub(super) use crate::pb::stats::stats_service::Service as StatsService;
use crate::{pb, prelude::*, revlog::RevlogReviewKind};
use crate::prelude::*;
use crate::revlog::RevlogReviewKind;
impl StatsService for Backend {
fn card_stats(&self, input: pb::cards::CardId) -> Result<pb::stats::CardStatsResponse> {

View file

@ -3,27 +3,29 @@
use std::sync::Arc;
use futures::future::{AbortHandle, AbortRegistration, Abortable};
use futures::future::AbortHandle;
use futures::future::AbortRegistration;
use futures::future::Abortable;
use pb::sync::sync_status_response::Required;
use reqwest::Url;
use tracing::warn;
use super::{progress::AbortHandleSlot, Backend};
use super::progress::AbortHandleSlot;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::sync::sync_service::Service as SyncService;
use crate::{
pb,
pb::sync::SyncStatusResponse,
prelude::*,
sync::{
collection::{
normal::{ClientSyncState, NormalSyncProgress, SyncActionRequired, SyncOutput},
progress::{sync_abort, FullSyncProgress},
status::online_sync_status_check,
},
http_client::HttpSyncClient,
login::{sync_login, SyncAuth},
},
};
use crate::pb::sync::SyncStatusResponse;
use crate::prelude::*;
use crate::sync::collection::normal::ClientSyncState;
use crate::sync::collection::normal::NormalSyncProgress;
use crate::sync::collection::normal::SyncActionRequired;
use crate::sync::collection::normal::SyncOutput;
use crate::sync::collection::progress::sync_abort;
use crate::sync::collection::progress::FullSyncProgress;
use crate::sync::collection::status::online_sync_status_check;
use crate::sync::http_client::HttpSyncClient;
use crate::sync::login::sync_login;
use crate::sync::login::SyncAuth;
#[derive(Default)]
pub(super) struct SyncState {
@ -264,8 +266,9 @@ impl Backend {
let state = rt.block_on(online_sync_status_check(local, &mut client))?;
{
let mut guard = self.state.lock().unwrap();
// On startup, the sync status check will block on network access, and then automatic syncing begins,
// taking hold of the mutex. By the time we reach here, our network status may be out of date,
// On startup, the sync status check will block on network access, and then
// automatic syncing begins, taking hold of the mutex. By the time
// we reach here, our network status may be out of date,
// so we discard it if stale.
if guard.sync.remote_sync_status.last_check < time_at_check_begin {
guard.sync.remote_sync_status.last_check = time_at_check_begin;

View file

@ -1,9 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{notes::to_note_ids, Backend};
use super::notes::to_note_ids;
use super::Backend;
use crate::pb;
pub(super) use crate::pb::tags::tags_service::Service as TagsService;
use crate::{pb, prelude::*};
use crate::prelude::*;
impl TagsService for Backend {
fn clear_unused_tags(

View file

@ -4,18 +4,22 @@
use std::sync::Arc;
use itertools::Itertools;
use strum::{Display, EnumIter, EnumString, IntoEnumIterator};
use strum::Display;
use strum::EnumIter;
use strum::EnumString;
use strum::IntoEnumIterator;
use crate::{
card::{CardQueue, CardType},
card_rendering::prettify_av_tags,
notetype::{CardTemplate, NotetypeKind},
pb,
prelude::*,
scheduler::{timespan::time_span, timing::SchedTimingToday},
template::RenderedNode,
text::html_to_text_line,
};
use crate::card::CardQueue;
use crate::card::CardType;
use crate::card_rendering::prettify_av_tags;
use crate::notetype::CardTemplate;
use crate::notetype::NotetypeKind;
use crate::pb;
use crate::prelude::*;
use crate::scheduler::timespan::time_span;
use crate::scheduler::timing::SchedTimingToday;
use crate::template::RenderedNode;
use crate::text::html_to_text_line;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Display, EnumIter, EnumString)]
#[strum(serialize_all = "camelCase")]
@ -242,9 +246,9 @@ impl Collection {
}
fn get_note_maybe_with_fields(&self, id: NoteId, _with_fields: bool) -> Result<Note> {
// todo: After note.sort_field has been modified so it can be displayed in the browser,
// we can update note_field_str() and only load the note with fields if a card render is
// necessary (see #1082).
// todo: After note.sort_field has been modified so it can be displayed in the
// browser, we can update note_field_str() and only load the note with
// fields if a card render is necessary (see #1082).
if true {
self.storage.get_note(id)?
} else {
@ -449,8 +453,9 @@ impl RowContext {
}
}
/// Returns the due date of the next due card that is not in a filtered deck, new, suspended or
/// buried or the empty string if there is no such card.
/// Returns the due date of the next due card that is not in a filtered
/// deck, new, suspended or buried or the empty string if there is no
/// such card.
fn note_due_str(&self) -> String {
self.cards
.iter()
@ -461,7 +466,8 @@ impl RowContext {
.unwrap_or_else(|| "".into())
}
/// Returns the average ease of the non-new cards or a hint if there aren't any.
/// Returns the average ease of the non-new cards or a hint if there aren't
/// any.
fn ease_str(&self) -> String {
let eases: Vec<u16> = self
.cards
@ -476,7 +482,8 @@ impl RowContext {
}
}
/// Returns the average interval of the review and relearn cards if there are any.
/// Returns the average interval of the review and relearn cards if there
/// are any.
fn interval_str(&self) -> String {
if !self.notes_mode {
match self.cards[0].ctype {

View file

@ -3,24 +3,27 @@
pub(crate) mod undo;
use std::collections::{hash_map::Entry, HashMap, HashSet};
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::collections::HashSet;
use num_enum::TryFromPrimitive;
use serde_repr::{Deserialize_repr, Serialize_repr};
use serde_repr::Deserialize_repr;
use serde_repr::Serialize_repr;
use crate::{
collection::Collection,
config::SchedulerVersion,
deckconfig::DeckConfig,
decks::DeckId,
define_newtype,
error::{AnkiError, FilteredDeckError, Result},
notes::NoteId,
ops::StateChanges,
prelude::*,
timestamp::TimestampSecs,
types::Usn,
};
use crate::collection::Collection;
use crate::config::SchedulerVersion;
use crate::deckconfig::DeckConfig;
use crate::decks::DeckId;
use crate::define_newtype;
use crate::error::AnkiError;
use crate::error::FilteredDeckError;
use crate::error::Result;
use crate::notes::NoteId;
use crate::ops::StateChanges;
use crate::prelude::*;
use crate::timestamp::TimestampSecs;
use crate::types::Usn;
define_newtype!(CardId, i64);
@ -79,7 +82,8 @@ pub struct Card {
pub(crate) flags: u8,
/// The position in the new queue before leaving it.
pub(crate) original_position: Option<u32>,
/// JSON object or empty; exposed through the reviewer for persisting custom state
/// JSON object or empty; exposed through the reviewer for persisting custom
/// state
pub(crate) custom_data: String,
}
@ -159,9 +163,10 @@ impl Card {
}
}
/// Remaining steps after configured steps have changed, disregarding "remaining today".
/// [None] if same as before. A step counts as remaining if the card has not passed a step
/// with the same or a greater delay, but output will be at least 1.
/// Remaining steps after configured steps have changed, disregarding
/// "remaining today". [None] if same as before. A step counts as
/// remaining if the card has not passed a step with the same or a
/// greater delay, but output will be at least 1.
fn new_remaining_steps(&self, new_steps: &[f32], old_steps: &[f32]) -> Option<u32> {
let remaining = self.remaining_steps();
let new_remaining = old_steps
@ -387,10 +392,9 @@ impl<'a> RemainingStepsAdjuster<'a> {
#[cfg(test)]
mod test {
use crate::tests::{
open_test_collection_with_learning_card, open_test_collection_with_relearning_card,
DeckAdder,
};
use crate::tests::open_test_collection_with_learning_card;
use crate::tests::open_test_collection_with_relearning_card;
use crate::tests::DeckAdder;
#[test]
fn should_increase_remaining_learning_steps_if_new_deck_has_more_unpassed_ones() {

View file

@ -3,7 +3,8 @@
use std::collections::HashMap;
use crate::{pb, prelude::*};
use crate::pb;
use crate::prelude::*;
mod parser;
mod writer;
@ -92,7 +93,8 @@ pub fn anki_directive_benchmark() {
mod test {
use super::*;
/// Strip av tags and assert equality with input or separately passed output.
/// Strip av tags and assert equality with input or separately passed
/// output.
macro_rules! assert_av_stripped {
($input:expr) => {
assert_eq!($input, strip_av_tags($input));

View file

@ -3,16 +3,30 @@
use std::collections::HashMap;
use nom::{
branch::alt,
bytes::complete::{is_not, tag},
character::complete::{anychar, multispace0},
combinator::{map, not, recognize, success, value},
multi::{many0, many1},
sequence::{delimited, pair, preceded, separated_pair, terminated, tuple},
};
use nom::branch::alt;
use nom::bytes::complete::is_not;
use nom::bytes::complete::tag;
use nom::character::complete::anychar;
use nom::character::complete::multispace0;
use nom::combinator::map;
use nom::combinator::not;
use nom::combinator::recognize;
use nom::combinator::success;
use nom::combinator::value;
use nom::multi::many0;
use nom::multi::many1;
use nom::sequence::delimited;
use nom::sequence::pair;
use nom::sequence::preceded;
use nom::sequence::separated_pair;
use nom::sequence::terminated;
use nom::sequence::tuple;
use super::{CardNodes, Directive, Node, OtherDirective, TtsDirective};
use super::CardNodes;
use super::Directive;
use super::Node;
use super::OtherDirective;
use super::TtsDirective;
type IResult<'a, O> = nom::IResult<&'a str, O>;
@ -87,7 +101,8 @@ fn node(s: &str) -> IResult<Node> {
alt((text_node, sound_node, tag_node))(s)
}
/// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or video file.
/// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
/// video file.
fn sound_node(s: &str) -> IResult<Node> {
map(
delimited(tag("[sound:"), is_not("]"), tag("]")),
@ -106,7 +121,8 @@ fn tag_node(s: &str) -> IResult<Node> {
fn opening_parser<'name, 's: 'name>(
name: &'name str,
) -> impl FnMut(&'s str) -> IResult<Vec<(&str, &str)>> + 'name {
/// List of whitespace-separated `key=val` tuples, where `val` may be empty.
/// List of whitespace-separated `key=val` tuples, where `val` may be
/// empty.
fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
fn key(s: &str) -> IResult<&str> {
is_not("] \t\r\n=")(s)
@ -136,7 +152,8 @@ fn tag_node(s: &str) -> IResult<Node> {
value((), tuple((tag("[/anki:"), tag(name), tag("]"))))
}
/// Return a parser to match and return anything until a closing `name` tag is found.
/// Return a parser to match and return anything until a closing `name` tag
/// is found.
fn content_parser<'parser, 'name: 'parser, 's: 'parser>(
name: &'name str,
) -> impl FnMut(&'s str) -> IResult<&str> + 'parser {

View file

@ -3,12 +3,15 @@
use std::fmt::Write as _;
use super::{CardNodes, Directive, Node, OtherDirective, TtsDirective};
use crate::{
pb,
prelude::*,
text::{decode_entities, strip_html_for_tts},
};
use super::CardNodes;
use super::Directive;
use super::Node;
use super::OtherDirective;
use super::TtsDirective;
use crate::pb;
use crate::prelude::*;
use crate::text::decode_entities;
use crate::text::strip_html_for_tts;
impl<'a> CardNodes<'a> {
pub(super) fn write_without_av_tags(&self) -> String {

View file

@ -1,19 +1,23 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{borrow::Cow, collections::HashSet, fmt::Write};
use std::borrow::Cow;
use std::collections::HashSet;
use std::fmt::Write;
use htmlescape::encode_attribute;
use lazy_static::lazy_static;
use nom::{
branch::alt,
bytes::complete::{tag, take_while},
combinator::map,
IResult,
};
use regex::{Captures, Regex};
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
use nom::combinator::map;
use nom::IResult;
use regex::Captures;
use regex::Regex;
use crate::{latex::contains_latex, template::RenderContext, text::strip_html_preserving_entities};
use crate::latex::contains_latex;
use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;
lazy_static! {
static ref MATHJAX: Regex = Regex::new(

Some files were not shown because too many files have changed in this diff.