Merge branch 'main' into sassy-comments

This commit is contained in:
Matthias Metelka 2022-10-21 19:30:02 +02:00 committed by GitHub
commit 34ed551f94
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
153 changed files with 2596 additions and 1136 deletions

93
Cargo.lock generated
View file

@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
@ -67,6 +76,7 @@ dependencies = [
"bytes",
"chrono",
"coarsetime",
"convert_case 0.6.0",
"criterion",
"csv",
"dissimilar",
@ -109,6 +119,7 @@ dependencies = [
"slog-async",
"slog-envlogger",
"slog-term",
"snafu",
"strum",
"tempfile",
"tokio",
@ -215,6 +226,21 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.13.0"
@ -409,6 +435,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "core-foundation"
version = "0.9.3"
@ -578,7 +613,7 @@ version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"convert_case 0.4.0",
"proc-macro2",
"quote",
"rustc_version",
@ -623,6 +658,12 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"
[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "dtoa"
version = "0.4.8"
@ -932,6 +973,12 @@ dependencies = [
"wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
name = "gimli"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
[[package]]
name = "h2"
version = "0.3.14"
@ -1578,6 +1625,15 @@ dependencies = [
"libc",
]
[[package]]
name = "object"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.15.0"
@ -2314,6 +2370,12 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rustc-demangle"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "rustc-hash"
version = "1.1.0"
@ -2665,6 +2727,29 @@ version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "snafu"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd726aec4ebad65756394ff89a9b9598793d4e30121cd71690244c1e497b3aee"
dependencies = [
"backtrace",
"doc-comment",
"snafu-derive",
]
[[package]]
name = "snafu-derive"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712529e9b0b014eabaa345b38e06032767e3dc393e8b017e853b1d7247094e74"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "snowflake"
version = "1.3.0"
@ -3175,6 +3260,12 @@ dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a"
[[package]]
name = "unicode-width"
version = "0.1.10"

View file

@ -55,6 +55,9 @@ compile_data_attr = "glob([\"**/*.rsv\"])"
[package.metadata.raze.crates.bstr.'*']
compile_data_attr = "glob([\"**/*.dfa\"])"
[package.metadata.raze.crates.snafu.'*']
compile_data_attr = "glob([\"**/*.md\"])"
[package.metadata.raze.crates.pyo3-build-config.'*']
buildrs_additional_environment_variables = { "PYO3_NO_PYTHON" = "1" }

View file

@ -66,6 +66,15 @@ alias(
],
)
alias(
name = "convert_case",
actual = "@raze__convert_case__0_6_0//:convert_case",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "csv",
actual = "@raze__csv__1_1_6//:csv",
@ -489,6 +498,15 @@ alias(
],
)
alias(
name = "snafu",
actual = "@raze__snafu__0_7_2//:snafu",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_24_1//:strum",
@ -608,19 +626,9 @@ alias(
# Export file for Stardoc support
exports_files(
glob([
"**/*.bazel",
"**/*.bzl",
]),
visibility = ["//visibility:public"],
)
filegroup(
name = "srcs",
srcs = glob([
"**/*.bazel",
"**/*.bzl",
]),
[
"crates.bzl",
],
visibility = ["//visibility:public"],
)

View file

@ -11,6 +11,16 @@ load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") # buildifier: di
def raze_fetch_remote_crates():
"""This function defines a collection of repos and should be called in a WORKSPACE file"""
maybe(
http_archive,
name = "raze__addr2line__0_17_0",
url = "https://crates.io/api/v1/crates/addr2line/0.17.0/download",
type = "tar.gz",
sha256 = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b",
strip_prefix = "addr2line-0.17.0",
build_file = Label("//cargo/remote:BUILD.addr2line-0.17.0.bazel"),
)
maybe(
http_archive,
name = "raze__adler__1_0_2",
@ -141,6 +151,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.autocfg-1.1.0.bazel"),
)
maybe(
http_archive,
name = "raze__backtrace__0_3_66",
url = "https://crates.io/api/v1/crates/backtrace/0.3.66/download",
type = "tar.gz",
sha256 = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7",
strip_prefix = "backtrace-0.3.66",
build_file = Label("//cargo/remote:BUILD.backtrace-0.3.66.bazel"),
)
maybe(
http_archive,
name = "raze__base64__0_13_0",
@ -301,6 +321,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.convert_case-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__convert_case__0_6_0",
url = "https://crates.io/api/v1/crates/convert_case/0.6.0/download",
type = "tar.gz",
sha256 = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca",
strip_prefix = "convert_case-0.6.0",
build_file = Label("//cargo/remote:BUILD.convert_case-0.6.0.bazel"),
)
maybe(
http_archive,
name = "raze__core_foundation__0_9_3",
@ -451,6 +481,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.dissimilar-1.0.4.bazel"),
)
maybe(
http_archive,
name = "raze__doc_comment__0_3_3",
url = "https://crates.io/api/v1/crates/doc-comment/0.3.3/download",
type = "tar.gz",
sha256 = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10",
strip_prefix = "doc-comment-0.3.3",
build_file = Label("//cargo/remote:BUILD.doc-comment-0.3.3.bazel"),
)
maybe(
http_archive,
name = "raze__dtoa__0_4_8",
@ -791,6 +831,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.getrandom-0.2.7.bazel"),
)
maybe(
http_archive,
name = "raze__gimli__0_26_2",
url = "https://crates.io/api/v1/crates/gimli/0.26.2/download",
type = "tar.gz",
sha256 = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d",
strip_prefix = "gimli-0.26.2",
build_file = Label("//cargo/remote:BUILD.gimli-0.26.2.bazel"),
)
maybe(
http_archive,
name = "raze__h2__0_3_14",
@ -1441,6 +1491,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.num_threads-0.1.6.bazel"),
)
maybe(
http_archive,
name = "raze__object__0_29_0",
url = "https://crates.io/api/v1/crates/object/0.29.0/download",
type = "tar.gz",
sha256 = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53",
strip_prefix = "object-0.29.0",
build_file = Label("//cargo/remote:BUILD.object-0.29.0.bazel"),
)
maybe(
http_archive,
name = "raze__once_cell__1_15_0",
@ -2071,6 +2131,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.rusqlite-0.28.0.bazel"),
)
maybe(
http_archive,
name = "raze__rustc_demangle__0_1_21",
url = "https://crates.io/api/v1/crates/rustc-demangle/0.1.21/download",
type = "tar.gz",
sha256 = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342",
strip_prefix = "rustc-demangle-0.1.21",
build_file = Label("//cargo/remote:BUILD.rustc-demangle-0.1.21.bazel"),
)
maybe(
http_archive,
name = "raze__rustc_hash__1_1_0",
@ -2431,6 +2501,26 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.smallvec-1.9.0.bazel"),
)
maybe(
http_archive,
name = "raze__snafu__0_7_2",
url = "https://crates.io/api/v1/crates/snafu/0.7.2/download",
type = "tar.gz",
sha256 = "dd726aec4ebad65756394ff89a9b9598793d4e30121cd71690244c1e497b3aee",
strip_prefix = "snafu-0.7.2",
build_file = Label("//cargo/remote:BUILD.snafu-0.7.2.bazel"),
)
maybe(
http_archive,
name = "raze__snafu_derive__0_7_2",
url = "https://crates.io/api/v1/crates/snafu-derive/0.7.2/download",
type = "tar.gz",
sha256 = "712529e9b0b014eabaa345b38e06032767e3dc393e8b017e853b1d7247094e74",
strip_prefix = "snafu-derive-0.7.2",
build_file = Label("//cargo/remote:BUILD.snafu-derive-0.7.2.bazel"),
)
maybe(
http_archive,
name = "raze__snowflake__1_3_0",
@ -2961,6 +3051,16 @@ def raze_fetch_remote_crates():
build_file = Label("//cargo/remote:BUILD.unicode-normalization-0.1.22.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_segmentation__1_10_0",
url = "https://crates.io/api/v1/crates/unicode-segmentation/1.10.0/download",
type = "tar.gz",
sha256 = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a",
strip_prefix = "unicode-segmentation-1.10.0",
build_file = Label("//cargo/remote:BUILD.unicode-segmentation-1.10.0.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_width__0_1_10",

View file

@ -1,4 +1,13 @@
[
{
"name": "addr2line",
"version": "0.17.0",
"authors": null,
"repository": "https://github.com/gimli-rs/addr2line",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "A cross-platform symbolication library written in Rust, using `gimli`"
},
{
"name": "adler",
"version": "1.0.2",
@ -134,6 +143,15 @@
"license_file": null,
"description": "Automatic cfg for Rust compiler features"
},
{
"name": "backtrace",
"version": "0.3.66",
"authors": "The Rust Project Developers",
"repository": "https://github.com/rust-lang/backtrace-rs",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "A library to acquire a stack trace (backtrace) at runtime in a Rust program."
},
{
"name": "base64",
"version": "0.13.0",
@ -251,6 +269,15 @@
"license_file": null,
"description": "Compares two equal-sized byte strings in constant time."
},
{
"name": "convert_case",
"version": "0.6.0",
"authors": "Rutrum <dave@rutrum.net>",
"repository": "https://github.com/rutrum/convert-case",
"license": "MIT",
"license_file": null,
"description": "Convert strings into any case"
},
{
"name": "core-foundation",
"version": "0.9.3",
@ -359,6 +386,15 @@
"license_file": null,
"description": "Diff library with semantic cleanup, based on Google's diff-match-patch"
},
{
"name": "doc-comment",
"version": "0.3.3",
"authors": "Guillaume Gomez <guillaume1.gomez@gmail.com>",
"repository": "https://github.com/GuillaumeGomez/doc-comment",
"license": "MIT",
"license_file": null,
"description": "Macro to generate doc comments"
},
{
"name": "either",
"version": "1.8.0",
@ -620,6 +656,15 @@
"license_file": null,
"description": "A small cross-platform library for retrieving random data from system source"
},
{
"name": "gimli",
"version": "0.26.2",
"authors": null,
"repository": "https://github.com/gimli-rs/gimli",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "A library for reading and writing the DWARF debugging format."
},
{
"name": "h2",
"version": "0.3.14",
@ -1142,6 +1187,15 @@
"license_file": null,
"description": "A minimal library that determines the number of running threads for the current process."
},
{
"name": "object",
"version": "0.29.0",
"authors": null,
"repository": "https://github.com/gimli-rs/object",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "A unified interface for reading and writing object file formats."
},
{
"name": "once_cell",
"version": "1.15.0",
@ -1565,6 +1619,15 @@
"license_file": null,
"description": "Ergonomic wrapper for SQLite"
},
{
"name": "rustc-demangle",
"version": "0.1.21",
"authors": "Alex Crichton <alex@alexcrichton.com>",
"repository": "https://github.com/alexcrichton/rustc-demangle",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "Rust compiler symbol demangling."
},
{
"name": "rustc-hash",
"version": "1.1.0",
@ -1844,6 +1907,24 @@
"license_file": null,
"description": "'Small vector' optimization: store up to a small number of items on the stack"
},
{
"name": "snafu",
"version": "0.7.2",
"authors": "Jake Goulding <jake.goulding@gmail.com>",
"repository": "https://github.com/shepmaster/snafu",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "An ergonomic error handling library"
},
{
"name": "snafu-derive",
"version": "0.7.2",
"authors": "Jake Goulding <jake.goulding@gmail.com>",
"repository": "https://github.com/shepmaster/snafu",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "An ergonomic error handling library"
},
{
"name": "snowflake",
"version": "1.3.0",
@ -2294,6 +2375,15 @@
"license_file": null,
"description": "This crate provides functions for normalization of Unicode strings, including Canonical and Compatible Decomposition and Recomposition, as described in Unicode Standard Annex #15."
},
{
"name": "unicode-segmentation",
"version": "1.10.0",
"authors": "kwantam <kwantam@gmail.com>|Manish Goregaokar <manishsmail@gmail.com>",
"repository": "https://github.com/unicode-rs/unicode-segmentation",
"license": "Apache-2.0 OR MIT",
"license_file": null,
"description": "This crate provides Grapheme Cluster, Word and Sentence boundaries according to Unicode Standard Annex #29 rules."
},
{
"name": "unicode-width",
"version": "0.1.10",

View file

@ -0,0 +1,63 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # Apache-2.0 from expression "Apache-2.0 OR MIT"
])
# Generated Targets
# Unsupported target "addr2line" with type "example" omitted
rust_library(
name = "addr2line",
srcs = glob(["**/*.rs"]),
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2015",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=addr2line",
"manual",
],
version = "0.17.0",
# buildifier: leave-alone
deps = [
"@raze__gimli__0_26_2//:gimli",
],
)
# Unsupported target "correctness" with type "test" omitted
# Unsupported target "output_equivalence" with type "test" omitted
# Unsupported target "parse" with type "test" omitted

View file

@ -66,7 +66,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -80,7 +79,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -123,7 +121,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -138,7 +135,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -61,7 +61,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -0,0 +1,111 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
# buildifier: disable=out-of-order-load
# buildifier: disable=load-on-top
load(
"@rules_rust//cargo:cargo_build_script.bzl",
"cargo_build_script",
)
cargo_build_script(
name = "backtrace_build_script",
srcs = glob(["**/*.rs"]),
build_script_env = {
},
crate_features = [
"default",
"std",
],
crate_root = "build.rs",
data = glob(["**"]),
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"manual",
],
version = "0.3.66",
visibility = ["//visibility:private"],
deps = [
"@raze__cc__1_0_73//:cc",
],
)
# Unsupported target "benchmarks" with type "bench" omitted
# Unsupported target "backtrace" with type "example" omitted
# Unsupported target "raw" with type "example" omitted
rust_library(
name = "backtrace",
srcs = glob(["**/*.rs"]),
crate_features = [
"default",
"std",
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=backtrace",
"manual",
],
version = "0.3.66",
# buildifier: leave-alone
deps = [
":backtrace_build_script",
"@raze__addr2line__0_17_0//:addr2line",
"@raze__cfg_if__1_0_0//:cfg_if",
"@raze__libc__0_2_133//:libc",
"@raze__miniz_oxide__0_5_4//:miniz_oxide",
"@raze__object__0_29_0//:object",
"@raze__rustc_demangle__0_1_21//:rustc_demangle",
],
)
# Unsupported target "accuracy" with type "test" omitted
# Unsupported target "concurrent-panics" with type "test" omitted
# Unsupported target "long_fn_name" with type "test" omitted
# Unsupported target "skip_inner_frames" with type "test" omitted
# Unsupported target "smoke" with type "test" omitted

View file

@ -1,17 +0,0 @@
# Export file for Stardoc support
exports_files(
glob([
"**/*.bazel",
"**/*.bzl",
]),
visibility = ["//visibility:public"],
)
filegroup(
name = "srcs",
srcs = glob([
"**/*.bazel",
"**/*.bzl",
]),
visibility = ["//visibility:public"],
)

View file

@ -61,7 +61,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -0,0 +1,57 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT"
])
# Generated Targets
rust_library(
name = "convert_case",
srcs = glob(["**/*.rs"]),
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=convert_case",
"manual",
],
version = "0.6.0",
# buildifier: leave-alone
deps = [
"@raze__unicode_segmentation__1_10_0//:unicode_segmentation",
],
)
# Unsupported target "string_types" with type "test" omitted

View file

@ -59,7 +59,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -0,0 +1,84 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT"
])
# Generated Targets
# buildifier: disable=out-of-order-load
# buildifier: disable=load-on-top
load(
"@rules_rust//cargo:cargo_build_script.bzl",
"cargo_build_script",
)
cargo_build_script(
name = "doc_comment_build_script",
srcs = glob(["**/*.rs"]),
build_script_env = {
},
crate_features = [
],
crate_root = "build.rs",
data = glob(["**"]),
edition = "2015",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"manual",
],
version = "0.3.3",
visibility = ["//visibility:private"],
deps = [
],
)
rust_library(
name = "doc_comment",
srcs = glob(["**/*.rs"]),
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2015",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=doc_comment",
"manual",
],
version = "0.3.3",
# buildifier: leave-alone
deps = [
":doc_comment_build_script",
],
)

View file

@ -65,7 +65,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -107,7 +106,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -63,7 +63,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

70
cargo/remote/BUILD.gimli-0.26.2.bazel vendored Normal file
View file

@ -0,0 +1,70 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
# Unsupported target "bench" with type "bench" omitted
# Unsupported target "dwarf-validate" with type "example" omitted
# Unsupported target "dwarfdump" with type "example" omitted
# Unsupported target "simple" with type "example" omitted
# Unsupported target "simple_line" with type "example" omitted
rust_library(
name = "gimli",
srcs = glob(["**/*.rs"]),
crate_features = [
"read",
"read-core",
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=gimli",
"manual",
],
version = "0.26.2",
# buildifier: leave-alone
deps = [
],
)
# Unsupported target "convert_self" with type "test" omitted
# Unsupported target "parse_self" with type "test" omitted

View file

@ -63,7 +63,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
"@raze__core_foundation_sys__0_8_3//:core_foundation_sys",

View file

@ -59,7 +59,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -70,7 +70,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -63,7 +63,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
"@raze__security_framework_sys__2_6_1//:security_framework_sys",
@ -122,7 +121,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
"@raze__lazy_static__1_4_0//:lazy_static",

View file

@ -61,7 +61,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -58,7 +58,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
"@raze__libc__0_2_133//:libc",

66
cargo/remote/BUILD.object-0.29.0.bazel vendored Normal file
View file

@ -0,0 +1,66 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # Apache-2.0 from expression "Apache-2.0 OR MIT"
])
# Generated Targets
rust_library(
name = "object",
srcs = glob(["**/*.rs"]),
crate_features = [
"archive",
"coff",
"elf",
"macho",
"pe",
"read_core",
"unaligned",
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=object",
"manual",
],
version = "0.29.0",
# buildifier: leave-alone
deps = [
"@raze__memchr__2_5_0//:memchr",
],
)
# Unsupported target "integration" with type "test" omitted
# Unsupported target "parse_self" with type "test" omitted

View file

@ -64,7 +64,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -112,7 +111,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -84,7 +84,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -98,7 +97,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -69,7 +69,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -104,7 +104,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -0,0 +1,54 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
rust_library(
name = "rustc_demangle",
srcs = glob(["**/*.rs"]),
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2015",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=rustc-demangle",
"manual",
],
version = "0.1.21",
# buildifier: leave-alone
deps = [
],
)

View file

@ -62,7 +62,6 @@ rust_library(
(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

134
cargo/remote/BUILD.snafu-0.7.2.bazel vendored Normal file
View file

@ -0,0 +1,134 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
rust_library(
name = "snafu",
srcs = glob(["**/*.rs"]),
crate_features = [
"backtrace",
"backtraces",
"default",
"rust_1_39",
"rust_1_46",
"std",
],
crate_root = "src/lib.rs",
data = [],
compile_data = glob(["**/*.md"]),
edition = "2018",
proc_macro_deps = [
"@raze__snafu_derive__0_7_2//:snafu_derive",
],
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=snafu",
"manual",
],
version = "0.7.2",
# buildifier: leave-alone
deps = [
"@raze__backtrace__0_3_66//:backtrace",
"@raze__doc_comment__0_3_3//:doc_comment",
],
)
# Unsupported target "backtrace" with type "test" omitted
# Unsupported target "backtrace-optional" with type "test" omitted
# Unsupported target "backtrace-optional-enabled" with type "test" omitted
# Unsupported target "backtrace_attributes" with type "test" omitted
# Unsupported target "basic" with type "test" omitted
# Unsupported target "boxed_error_trait_object" with type "test" omitted
# Unsupported target "build-leaf-error" with type "test" omitted
# Unsupported target "context_selector_name" with type "test" omitted
# Unsupported target "default_error_display" with type "test" omitted
# Unsupported target "display-shorthand" with type "test" omitted
# Unsupported target "doc_comment" with type "test" omitted
# Unsupported target "ensure" with type "test" omitted
# Unsupported target "error_chain" with type "test" omitted
# Unsupported target "generics" with type "test" omitted
# Unsupported target "generics_with_default" with type "test" omitted
# Unsupported target "implicit" with type "test" omitted
# Unsupported target "location" with type "test" omitted
# Unsupported target "mapping_result_without_try_operator" with type "test" omitted
# Unsupported target "module" with type "test" omitted
# Unsupported target "multiple_attributes" with type "test" omitted
# Unsupported target "name-conflicts" with type "test" omitted
# Unsupported target "no_context" with type "test" omitted
# Unsupported target "opaque" with type "test" omitted
# Unsupported target "options" with type "test" omitted
# Unsupported target "premade_error" with type "test" omitted
# Unsupported target "raw_idents" with type "test" omitted
# Unsupported target "recursive_error" with type "test" omitted
# Unsupported target "report" with type "test" omitted
# Unsupported target "send_between_threads" with type "test" omitted
# Unsupported target "single_use_lifetimes_lint" with type "test" omitted
# Unsupported target "source_attributes" with type "test" omitted
# Unsupported target "stringly_typed" with type "test" omitted
# Unsupported target "structs" with type "test" omitted
# Unsupported target "visibility" with type "test" omitted

View file

@ -0,0 +1,60 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
rust_proc_macro(
name = "snafu_derive",
srcs = glob(["**/*.rs"]),
crate_features = [
"rust_1_39",
"rust_1_46",
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=snafu-derive",
"manual",
],
version = "0.7.2",
# buildifier: leave-alone
deps = [
"@raze__heck__0_4_0//:heck",
"@raze__proc_macro2__1_0_43//:proc_macro2",
"@raze__quote__1_0_21//:quote",
"@raze__syn__1_0_100//:syn",
],
)

View file

@ -60,7 +60,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -62,7 +62,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -73,7 +73,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -90,7 +90,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -103,7 +102,6 @@ cargo_build_script(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -183,7 +181,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [
@ -197,7 +194,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -0,0 +1,60 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
# Unsupported target "graphemes" with type "bench" omitted
# Unsupported target "unicode_words" with type "bench" omitted
# Unsupported target "word_bounds" with type "bench" omitted
rust_library(
name = "unicode_segmentation",
srcs = glob(["**/*.rs"]),
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=unicode-segmentation",
"manual",
],
version = "1.10.0",
# buildifier: leave-alone
deps = [
],
)

View file

@ -59,7 +59,6 @@ rust_library(
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
"@rules_rust//rust/platform:aarch64-apple-darwin",
"@rules_rust//rust/platform:aarch64-apple-ios",
"@rules_rust//rust/platform:aarch64-apple-ios-sim",
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu",
"@rules_rust//rust/platform:x86_64-apple-ios",
): [

View file

@ -12,6 +12,7 @@ errors-please-check-database = Please use the Check Database action, then try ag
errors-please-check-media = Please use the Check Media action, then try again.
errors-collection-too-new = This collection requires a newer version of Anki to open.
errors-invalid-ids = This deck contains timestamps in the future. Please contact the deck author and ask them to fix the issue.
errors-inconsistent-db-state = Your database appears to be in an inconsistent state. Please use the Check Database action.
## Card Rendering

View file

@ -66,10 +66,14 @@ message BackendError {
CARD_TYPE_ERROR = 18;
}
// localized error description suitable for displaying to the user
string localized = 1;
// error description, usually localized, suitable for displaying to the user
string message = 1;
// the error subtype
Kind kind = 2;
// optional page in the manual
optional links.HelpPageLinkRequest.HelpPage help_page = 3;
// additional information about the context in which the error occured
string context = 4;
// a backtrace of the underlying error; requires RUST_BACKTRACE to be set
string backtrace = 5;
}

View file

@ -33,9 +33,9 @@ def setup_protobuf_binary(name):
http_archive,
name = "protoc_bin_macos",
urls = [
"https://github.com/protocolbuffers/protobuf/releases/download/v3.19.4/protoc-3.19.4-osx-x86_64.zip",
"https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-osx-universal_binary.zip",
],
sha256 = "d8b55cf1e887917dd43c447d77bd5bd213faff1e18ac3a176b35558d86f7ffff",
sha256 = "e3324d3bc2e9bc967a0bec2472e0ec73b26f952c7c87f2403197414f780c3c6c",
build_file_content = """exports_files(["bin/protoc"])""",
)
@ -43,9 +43,9 @@ def setup_protobuf_binary(name):
http_archive,
name = "protoc_bin_linux_x86_64",
urls = [
"https://github.com/protocolbuffers/protobuf/releases/download/v3.19.4/protoc-3.19.4-linux-x86_64.zip",
"https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-x86_64.zip",
],
sha256 = "058d29255a08f8661c8096c92961f3676218704cbd516d3916ec468e139cbd87",
sha256 = "f90d0dd59065fef94374745627336d622702b67f0319f96cee894d41a974d47a",
build_file_content = """exports_files(["bin/protoc"])""",
)
@ -53,9 +53,9 @@ def setup_protobuf_binary(name):
http_archive,
name = "protoc_bin_linux_arm64",
urls = [
"https://github.com/protocolbuffers/protobuf/releases/download/v3.19.4/protoc-3.19.4-linux-aarch_64.zip",
"https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-linux-aarch_64.zip",
],
sha256 = "95584939e733bdd6ffb8245616b2071f565cd4c28163b6c21c8f936a9ee20861",
sha256 = "f3d8eb5839d6186392d8c7b54fbeabbb6fcdd90618a500b77cb2e24faa245cad",
build_file_content = """exports_files(["bin/protoc"])""",
)
@ -63,9 +63,9 @@ def setup_protobuf_binary(name):
http_archive,
name = "protoc_bin_windows",
urls = [
"https://github.com/protocolbuffers/protobuf/releases/download/v3.19.4/protoc-3.19.4-win64.zip",
"https://github.com/protocolbuffers/protobuf/releases/download/v21.8/protoc-21.8-win64.zip",
],
sha256 = "828d2bdfe410e988cfc46462bcabd34ffdda8cc172867989ec647eadc55b03b5",
sha256 = "3657053024faa439ff5f8c1dd2ee06bac0f9b9a3d660e99944f015a7451e87ec",
build_file_content = """exports_files(["bin/protoc.exe"])""",
)

View file

@ -3,30 +3,6 @@ ignore-patterns=.*_pb2.*
persistent = no
extension-pkg-whitelist=orjson
[TYPECHECK]
ignored-classes=
BrowserColumns,
BrowserRow,
HelpPage,
FormatTimespanRequest,
CardAnswer,
QueuedCards,
UnburyDeckRequest,
BuryOrSuspendCardsRequest,
NoteFieldsCheckResponse,
BackendError,
SetDeckCollapsedRequest,
ConfigKey,
HelpPageLinkRequest,
StripHtmlRequest,
CustomStudyRequest,
Cram,
ScheduleCardsAsNewRequest,
ExportLimit,
CsvColumn,
CsvMetadata,
ImportCsvRequest,
[REPORTS]
output-format=colorized
@ -64,3 +40,6 @@ good-names =
db,
ok,
ip,
[IMPORTS]
ignored-modules = anki.*_pb2, anki.sync_pb2

View file

@ -19,6 +19,7 @@ from anki.dbproxy import ValueForDB
from anki.utils import from_json_bytes, to_json_bytes
from ..errors import (
BackendError,
BackendIOError,
CardTypeError,
CustomStudyError,
@ -27,7 +28,6 @@ from ..errors import (
FilteredDeckError,
Interrupted,
InvalidInput,
LocalizedError,
NetworkError,
NotFoundError,
SearchError,
@ -172,58 +172,64 @@ class Translations(GeneratedTranslations):
def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
kind = backend_pb2.BackendError
val = err.kind
help_page = err.help_page if err.HasField("help_page") else None
context = err.context if err.context else None
backtrace = err.backtrace if err.backtrace else None
if val == kind.INTERRUPTED:
return Interrupted()
return Interrupted(err.message, help_page, context, backtrace)
elif val == kind.NETWORK_ERROR:
return NetworkError(err.localized)
return NetworkError(err.message, help_page, context, backtrace)
elif val == kind.SYNC_AUTH_ERROR:
return SyncError(err.localized, SyncErrorKind.AUTH)
return SyncError(err.message, help_page, context, backtrace, SyncErrorKind.AUTH)
elif val == kind.SYNC_OTHER_ERROR:
return SyncError(err.localized, SyncErrorKind.OTHER)
return SyncError(
err.message, help_page, context, backtrace, SyncErrorKind.OTHER
)
elif val == kind.IO_ERROR:
return BackendIOError(err.localized)
return BackendIOError(err.message, help_page, context, backtrace)
elif val == kind.DB_ERROR:
return DBError(err.localized)
return DBError(err.message, help_page, context, backtrace)
elif val == kind.CARD_TYPE_ERROR:
return CardTypeError(err.localized, err.help_page)
return CardTypeError(err.message, help_page, context, backtrace)
elif val == kind.TEMPLATE_PARSE:
return TemplateError(err.localized)
return TemplateError(err.message, help_page, context, backtrace)
elif val == kind.INVALID_INPUT:
return InvalidInput(err.localized)
return InvalidInput(err.message, help_page, context, backtrace)
elif val == kind.JSON_ERROR:
return LocalizedError(err.localized)
return BackendError(err.message, help_page, context, backtrace)
elif val == kind.NOT_FOUND_ERROR:
return NotFoundError()
return NotFoundError(err.message, help_page, context, backtrace)
elif val == kind.EXISTS:
return ExistsError()
return ExistsError(err.message, help_page, context, backtrace)
elif val == kind.FILTERED_DECK_ERROR:
return FilteredDeckError(err.localized)
return FilteredDeckError(err.message, help_page, context, backtrace)
elif val == kind.PROTO_ERROR:
return LocalizedError(err.localized)
return BackendError(err.message, help_page, context, backtrace)
elif val == kind.SEARCH_ERROR:
return SearchError(markdown(err.localized))
return SearchError(markdown(err.message), help_page, context, backtrace)
elif val == kind.UNDO_EMPTY:
return UndoEmpty()
return UndoEmpty(err.message, help_page, context, backtrace)
elif val == kind.CUSTOM_STUDY_ERROR:
return CustomStudyError(err.localized)
return CustomStudyError(err.message, help_page, context, backtrace)
else:
# sadly we can't do exhaustiveness checking on protobuf enums
# assert_exhaustive(val)
return LocalizedError(err.localized)
return BackendError(err.message, help_page, context, backtrace)

View file

@ -25,30 +25,31 @@ class AnkiException(Exception):
"""
class LocalizedError(AnkiException):
"An error with a localized description."
class BackendError(AnkiException):
"An error originating from Anki's backend."
def __init__(self, localized: str) -> None:
self._localized = localized
def __init__(
self,
message: str,
help_page: anki.collection.HelpPage.V | None,
context: str | None,
backtrace: str | None,
) -> None:
super().__init__()
self._message = message
self.help_page = help_page
self.context = context
self.backtrace = backtrace
def __str__(self) -> str:
return self._localized
return self._message
class DocumentedError(LocalizedError):
"""A localized error described in the manual."""
def __init__(self, localized: str, help_page: anki.collection.HelpPage.V) -> None:
self.help_page = help_page
super().__init__(localized)
class Interrupted(AnkiException):
class Interrupted(BackendError):
pass
class NetworkError(LocalizedError):
class NetworkError(BackendError):
pass
@ -57,57 +58,64 @@ class SyncErrorKind(Enum):
OTHER = 2
class SyncError(LocalizedError):
def __init__(self, localized: str, kind: SyncErrorKind):
class SyncError(BackendError):
def __init__(
self,
message: str,
help_page: anki.collection.HelpPage.V | None,
context: str | None,
backtrace: str | None,
kind: SyncErrorKind,
):
self.kind = kind
super().__init__(localized)
super().__init__(message, help_page, context, backtrace)
class BackendIOError(LocalizedError):
class BackendIOError(BackendError):
pass
class CustomStudyError(LocalizedError):
class CustomStudyError(BackendError):
pass
class DBError(LocalizedError):
class DBError(BackendError):
pass
class CardTypeError(DocumentedError):
class CardTypeError(BackendError):
pass
class TemplateError(LocalizedError):
class TemplateError(BackendError):
pass
class NotFoundError(AnkiException):
class NotFoundError(BackendError):
pass
class DeletedError(LocalizedError):
class DeletedError(BackendError):
pass
class ExistsError(AnkiException):
class ExistsError(BackendError):
pass
class UndoEmpty(AnkiException):
class UndoEmpty(BackendError):
pass
class FilteredDeckError(LocalizedError):
class FilteredDeckError(BackendError):
pass
class InvalidInput(LocalizedError):
class InvalidInput(BackendError):
pass
class SearchError(LocalizedError):
class SearchError(BackendError):
pass

View file

@ -66,6 +66,15 @@ alias(
],
)
alias(
name = "convert_case",
actual = "@raze__convert_case__0_6_0//:convert_case",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "csv",
actual = "@raze__csv__1_1_6//:csv",
@ -489,6 +498,15 @@ alias(
],
)
alias(
name = "snafu",
actual = "@raze__snafu__0_7_2//:snafu",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_24_1//:strum",

View file

@ -4,22 +4,7 @@ extension-pkg-whitelist=PyQt6
ignore = forms,hooks_gen.py
[TYPECHECK]
ignored-modules=win32file,pywintypes,socket,win32pipe,winrt,pyaudio
ignored-classes=
BrowserColumns,
BrowserRow,
SearchNode,
ConfigKey,
OpChanges,
UnburyDeckRequest,
CardAnswer,
QueuedCards,
ChangeNotetypeRequest,
CustomStudyRequest,
Cram,
ScheduleCardsAsNewRequest,
CsvColumn,
CsvMetadata,
ignored-modules=win32file,pywintypes,socket,win32pipe,winrt,pyaudio,anki.scheduler_pb2
[REPORTS]
output-format=colorized

View file

@ -11,7 +11,7 @@ from anki.cards import Card, CardId
from anki.collection import BrowserColumns as Columns
from anki.collection import Collection
from anki.consts import *
from anki.errors import LocalizedError, NotFoundError
from anki.errors import BackendError, NotFoundError
from anki.notes import Note, NoteId
from aqt import gui_hooks
from aqt.browser.table import Cell, CellRow, Column, ItemId, SearchContext
@ -101,7 +101,7 @@ class DataModel(QAbstractTableModel):
def _fetch_row_from_backend(self, item: ItemId) -> CellRow:
try:
row = CellRow(*self.col.browser_row_for_id(item))
except LocalizedError as e:
except BackendError as e:
return CellRow.disabled(self.len_columns(), str(e))
except Exception as e:
return CellRow.disabled(

View file

@ -194,7 +194,7 @@ class NoteState(ItemState):
def get_card(self, item: ItemId) -> Card:
if cards := self.get_note(item).cards():
return cards[0]
raise NotFoundError
raise NotFoundError("card not found", None, None, None)
def get_note(self, item: ItemId) -> Note:
return self.col.get_note(NoteId(item))

View file

@ -12,7 +12,7 @@ from typing import TYPE_CHECKING, Optional, TextIO, cast
from markdown import markdown
import aqt
from anki.errors import DocumentedError, Interrupted, LocalizedError
from anki.errors import BackendError, Interrupted
from aqt.qt import *
from aqt.utils import showText, showWarning, supportText, tr
@ -25,15 +25,19 @@ def show_exception(*, parent: QWidget, exception: Exception) -> None:
if isinstance(exception, Interrupted):
# nothing to do
return
help_page = exception.help_page if isinstance(exception, DocumentedError) else None
if not isinstance(exception, LocalizedError):
if isinstance(exception, BackendError):
if exception.context:
print(exception.context)
if exception.backtrace:
print(exception.backtrace)
showWarning(str(exception), parent=parent, help=exception.help_page)
else:
# if the error is not originating from the backend, dump
# a traceback to the console to aid in debugging
traceback.print_exception(
None, exception, exception.__traceback__, file=sys.stdout
)
showWarning(str(exception), parent=parent, help=help_page)
showWarning(str(exception), parent=parent)
if not os.environ.get("DEBUG"):

View file

@ -35,7 +35,6 @@ cargo_target = output_root / f"target-{platform.machine()}"
artifacts = output_root / "artifacts"
pyo3_config = output_root / "pyo3-build-config-file.txt"
pyoxidizer_folder = bazel_external / "pyoxidizer"
arm64_protobuf_wheel = bazel_external / "protobuf_wheel_mac_arm64"
pyoxidizer_binary = cargo_target / "release" / with_exe_extension("pyoxidizer")
for path in dist_folder.glob("*.zst"):
@ -64,7 +63,7 @@ elif sys.platform.startswith("darwin"):
else:
pyqt5_folder_name = "pyqt514"
os.environ["TARGET"] = "x86_64-apple-darwin"
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.13"
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.14"
else:
is_lin = True
if platform.machine() == "x86_64":
@ -133,17 +132,8 @@ def install_wheels_into_venv():
buf = f.read()
with open(constraints, "w") as f:
extracted = re.findall("^(\S+==\S+) ", buf, flags=re.M)
extracted = [
line for line in extracted if not arm64_mac or "protobuf" not in line
]
extracted = [line for line in extracted if "protobuf" not in line]
f.write("\n".join(extracted))
# pypi protobuf lacks C extension on darwin-arm64, so we have to use a version
# we built ourselves
if arm64_mac:
wheels = glob.glob(str(arm64_protobuf_wheel / "*.whl"))
subprocess.run(
[pip, "install", "--upgrade", "-c", constraints, *wheels], check=True
)
# install wheels and upgrade any deps
wheels = glob.glob(str(workspace / ".bazel" / "out" / "dist" / "*.whl"))
subprocess.run(

View file

@ -115,12 +115,12 @@ def register_repos():
################
core_i18n_repo = "anki-core-i18n"
core_i18n_commit = "b9d5c896f22fe6e79810194f41222d8638c13e16"
core_i18n_zip_csum = "8ef7888373cacf682c17f41056dc1f5348f60a15e1809c8db0f66f4072e7d5fb"
core_i18n_commit = "3817899a01a67c8d3a9426268af466b935d762b8"
core_i18n_zip_csum = "7146dac6b6f6b2dafe6b6a9e03a9126e2a7772abb9a16fe66ecb359ef5010675"
qtftl_i18n_repo = "anki-desktop-ftl"
qtftl_i18n_commit = "a8bd0e284e2785421180af2ce10dd1d534b0033d"
qtftl_i18n_zip_csum = "f88398324a64be99521bd5cd7e79e7dda64c31a2cd4e568328a211c7765b23ac"
qtftl_i18n_commit = "f1f4859e6bcdd18f0b077ad4f9169a518d9634bf"
qtftl_i18n_zip_csum = "96fafee1fe6416586775fd33ce08d5656c7d41b3e09ef680eb62a1f486b8f35c"
i18n_build_content = """
filegroup(
@ -194,16 +194,6 @@ exports_files(["l10n.toml"])
sha256 = "0815a601baba05e03bc36b568cdc2332b1cf4aa17125fc33c69de125f8dd687f",
)
maybe(
http_archive,
name = "protobuf_wheel_mac_arm64",
build_file_content = " ",
urls = [
"https://github.com/ankitects/anki-bundle-extras/releases/download/anki-2022-02-09/protobuf-wheel-mac-arm64.tar",
],
sha256 = "401d1cd6d949af463b3945f0d5dc887185b27fa5478cb6847bf94f680ea797b4",
)
maybe(
http_archive,
name = "audio_mac_amd64",

View file

@ -77,6 +77,7 @@ rust_library(
"//rslib/cargo:bytes",
"//rslib/cargo:chrono",
"//rslib/cargo:coarsetime",
"//rslib/cargo:convert_case",
"//rslib/cargo:csv",
"//rslib/cargo:dissimilar",
"//rslib/cargo:flate2",
@ -110,6 +111,7 @@ rust_library(
"//rslib/cargo:slog_async",
"//rslib/cargo:slog_envlogger",
"//rslib/cargo:slog_term",
"//rslib/cargo:snafu",
"//rslib/cargo:strum",
"//rslib/cargo:tempfile",
"//rslib/cargo:tokio",

View file

@ -103,3 +103,5 @@ zstd = { version="0.11.2", features=["zstdmt"] }
num_cpus = "1.13.1"
csv = { git="https://github.com/ankitects/rust-csv.git", rev="1c9d3aab6f79a7d815c69f925a46a4590c115f90" }
dissimilar = "1.0.4"
snafu = { version = "0.7.2", features = ["backtraces"] }
convert_case = "0.6.0"

View file

@ -32,7 +32,7 @@ pub trait Service {
}
buf.push_str(
r#"
_ => Err(crate::error::AnkiError::invalid_input("invalid command")),
_ => crate::invalid_input!("invalid command"),
}
}
"#,

View file

@ -66,6 +66,15 @@ alias(
],
)
alias(
name = "convert_case",
actual = "@raze__convert_case__0_6_0//:convert_case",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "csv",
actual = "@raze__csv__1_1_6//:csv",
@ -489,6 +498,15 @@ alias(
],
)
alias(
name = "snafu",
actual = "@raze__snafu__0_7_2//:snafu",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_24_1//:strum",

View file

@ -66,6 +66,15 @@ alias(
],
)
alias(
name = "convert_case",
actual = "@raze__convert_case__0_6_0//:convert_case",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "csv",
actual = "@raze__csv__1_1_6//:csv",
@ -489,6 +498,15 @@ alias(
],
)
alias(
name = "snafu",
actual = "@raze__snafu__0_7_2//:snafu",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_24_1//:strum",

View file

@ -66,6 +66,15 @@ alias(
],
)
alias(
name = "convert_case",
actual = "@raze__convert_case__0_6_0//:convert_case",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "csv",
actual = "@raze__csv__1_1_6//:csv",
@ -489,6 +498,15 @@ alias(
],
)
alias(
name = "snafu",
actual = "@raze__snafu__0_7_2//:snafu",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_24_1//:strum",

View file

@ -66,6 +66,15 @@ alias(
],
)
alias(
name = "convert_case",
actual = "@raze__convert_case__0_6_0//:convert_case",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "csv",
actual = "@raze__csv__1_1_6//:csv",
@ -489,6 +498,15 @@ alias(
],
)
alias(
name = "snafu",
actual = "@raze__snafu__0_7_2//:snafu",
tags = [
"cargo-raze",
"manual",
],
)
alias(
name = "strum",
actual = "@raze__strum__0_24_1//:strum",

View file

@ -65,7 +65,7 @@ impl Collection {
return Ok(home_deck);
}
// default deck
self.get_deck(DeckId(1))?.ok_or(AnkiError::NotFound)
self.get_deck(DeckId(1))?.or_not_found(DeckId(1))
}
fn get_current_notetype_for_adding(&mut self) -> Result<Arc<Notetype>> {
@ -79,7 +79,7 @@ impl Collection {
if let Some((ntid, _)) = self.storage.get_all_notetype_names()?.first() {
Ok(self.get_notetype(*ntid)?.unwrap())
} else {
Err(AnkiError::NotFound)
invalid_input!("collection has no notetypes");
}
}

View file

@ -11,10 +11,11 @@ use crate::{
impl CardsService for Backend {
fn get_card(&self, input: pb::CardId) -> Result<pb::Card> {
let cid = input.into();
self.with_col(|col| {
col.storage
.get_card(input.into())
.and_then(|opt| opt.ok_or(AnkiError::NotFound))
.get_card(cid)
.and_then(|opt| opt.or_not_found(cid))
.map(Into::into)
})
}
@ -67,10 +68,8 @@ impl TryFrom<pb::Card> for Card {
type Error = AnkiError;
fn try_from(c: pb::Card) -> Result<Self, Self::Error> {
let ctype = CardType::try_from(c.ctype as u8)
.map_err(|_| AnkiError::invalid_input("invalid card type"))?;
let queue = CardQueue::try_from(c.queue as i8)
.map_err(|_| AnkiError::invalid_input("invalid card queue"))?;
let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?;
let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?;
Ok(Card {
id: CardId(c.id),
note_id: NoteId(c.note_id),

View file

@ -86,11 +86,8 @@ impl CardRenderingService for Backend {
&self,
input: pb::RenderUncommittedCardRequest,
) -> Result<pb::RenderCardResponse> {
let template = input.template.ok_or(AnkiError::NotFound)?.into();
let mut note = input
.note
.ok_or_else(|| AnkiError::invalid_input("missing note"))?
.into();
let template = input.template.or_invalid("missing template")?.into();
let mut note = input.note.or_invalid("missing note")?.into();
let ord = input.card_ord as u16;
let fill_empty = input.fill_empty;
self.with_col(|col| {
@ -105,10 +102,7 @@ impl CardRenderingService for Backend {
) -> Result<pb::RenderCardResponse> {
let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
let template = schema11.into();
let mut note = input
.note
.ok_or_else(|| AnkiError::invalid_input("missing note"))?
.into();
let mut note = input.note.or_invalid("missing note")?.into();
let ord = input.card_ord as u16;
let fill_empty = input.fill_empty;
self.with_col(|col| {

View file

@ -57,7 +57,7 @@ impl ConfigService for Backend {
fn get_config_json(&self, input: pb::String) -> Result<pb::Json> {
self.with_col(|col| {
let val: Option<Value> = col.get_config_optional(input.val.as_str());
val.ok_or(AnkiError::NotFound)
val.or_not_found(input.val)
.and_then(|v| serde_json::to_vec(&v).map_err(Into::into))
.map(Into::into)
})

View file

@ -80,21 +80,14 @@ impl DecksService for Backend {
fn get_deck_id_by_name(&self, input: pb::String) -> Result<pb::DeckId> {
self.with_col(|col| {
col.get_deck_id(&input.val).and_then(|d| {
d.ok_or(AnkiError::NotFound)
.map(|d| pb::DeckId { did: d.0 })
})
col.get_deck_id(&input.val)
.and_then(|d| d.or_not_found(input.val).map(|d| pb::DeckId { did: d.0 }))
})
}
fn get_deck(&self, input: pb::DeckId) -> Result<pb::Deck> {
self.with_col(|col| {
Ok(col
.storage
.get_deck(input.into())?
.ok_or(AnkiError::NotFound)?
.into())
})
let did = input.into();
self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into()))
}
fn update_deck(&self, input: pb::Deck) -> Result<pb::OpChanges> {
@ -113,12 +106,9 @@ impl DecksService for Backend {
}
fn get_deck_legacy(&self, input: pb::DeckId) -> Result<pb::Json> {
let did = input.into();
self.with_col(|col| {
let deck: DeckSchema11 = col
.storage
.get_deck(input.into())?
.ok_or(AnkiError::NotFound)?
.into();
let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into();
serde_json::to_vec(&deck)
.map_err(Into::into)
.map(Into::into)
@ -273,10 +263,7 @@ impl TryFrom<pb::Deck> for Deck {
mtime_secs: TimestampSecs(d.mtime_secs),
usn: Usn(d.usn),
common: d.common.unwrap_or_default(),
kind: d
.kind
.ok_or_else(|| AnkiError::invalid_input("missing kind"))?
.into(),
kind: d.kind.or_invalid("missing kind")?.into(),
})
}
}

View file

@ -9,43 +9,46 @@ use crate::{
};
impl AnkiError {
pub(super) fn into_protobuf(self, tr: &I18n) -> pb::BackendError {
let localized = self.localized_description(tr);
pub fn into_protobuf(self, tr: &I18n) -> pb::BackendError {
let message = self.message(tr);
let help_page = self.help_page().map(|page| page as i32);
let context = self.context();
let backtrace = self.backtrace();
let kind = match self {
AnkiError::InvalidInput(_) => Kind::InvalidInput,
AnkiError::TemplateError(_) => Kind::TemplateParse,
AnkiError::IoError(_) => Kind::IoError,
AnkiError::DbError(_) => Kind::DbError,
AnkiError::NetworkError(_) => Kind::NetworkError,
AnkiError::SyncError(err) => err.kind.into(),
AnkiError::InvalidInput { .. } => Kind::InvalidInput,
AnkiError::TemplateError { .. } => Kind::TemplateParse,
AnkiError::DbError { .. } => Kind::DbError,
AnkiError::NetworkError { .. } => Kind::NetworkError,
AnkiError::SyncError { source } => source.kind.into(),
AnkiError::Interrupted => Kind::Interrupted,
AnkiError::CollectionNotOpen => Kind::InvalidInput,
AnkiError::CollectionAlreadyOpen => Kind::InvalidInput,
AnkiError::JsonError(_) => Kind::JsonError,
AnkiError::ProtoError(_) => Kind::ProtoError,
AnkiError::NotFound => Kind::NotFoundError,
AnkiError::JsonError { .. } => Kind::JsonError,
AnkiError::ProtoError { .. } => Kind::ProtoError,
AnkiError::NotFound { .. } => Kind::NotFoundError,
AnkiError::Deleted => Kind::Deleted,
AnkiError::Existing => Kind::Exists,
AnkiError::FilteredDeckError(_) => Kind::FilteredDeckError,
AnkiError::SearchError(_) => Kind::SearchError,
AnkiError::CardTypeError(_) => Kind::CardTypeError,
AnkiError::FilteredDeckError { .. } => Kind::FilteredDeckError,
AnkiError::SearchError { .. } => Kind::SearchError,
AnkiError::CardTypeError { .. } => Kind::CardTypeError,
AnkiError::ParseNumError => Kind::InvalidInput,
AnkiError::InvalidRegex(_) => Kind::InvalidInput,
AnkiError::InvalidRegex { .. } => Kind::InvalidInput,
AnkiError::UndoEmpty => Kind::UndoEmpty,
AnkiError::MultipleNotetypesSelected => Kind::InvalidInput,
AnkiError::DatabaseCheckRequired => Kind::InvalidInput,
AnkiError::CustomStudyError(_) => Kind::CustomStudyError,
AnkiError::ImportError(_) => Kind::ImportError,
AnkiError::FileIoError(_) => Kind::IoError,
AnkiError::CustomStudyError { .. } => Kind::CustomStudyError,
AnkiError::ImportError { .. } => Kind::ImportError,
AnkiError::FileIoError { .. } => Kind::IoError,
AnkiError::MediaCheckRequired => Kind::InvalidInput,
AnkiError::InvalidId => Kind::InvalidInput,
};
pb::BackendError {
kind: kind as i32,
localized,
message,
help_page,
context,
backtrace,
}
}
}

View file

@ -60,13 +60,7 @@ use self::{
sync::{SyncService, SyncState},
tags::TagsService,
};
use crate::{
backend::dbproxy::db_command_bytes,
collection::Collection,
error::{AnkiError, Result},
i18n::I18n,
log, pb,
};
use crate::{backend::dbproxy::db_command_bytes, log, pb, prelude::*};
pub struct Backend {
col: Arc<Mutex<Option<Collection>>>,
@ -126,7 +120,7 @@ impl Backend {
input: &[u8],
) -> result::Result<Vec<u8>, Vec<u8>> {
pb::ServiceIndex::from_i32(service as i32)
.ok_or_else(|| AnkiError::invalid_input("invalid service"))
.or_invalid("invalid service")
.and_then(|service| match service {
pb::ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input),
pb::ServiceIndex::Decks => DecksService::run_method(self, method, input),

View file

@ -13,15 +13,16 @@ use crate::{
impl NotesService for Backend {
fn new_note(&self, input: pb::NotetypeId) -> Result<pb::Note> {
let ntid = input.into();
self.with_col(|col| {
let nt = col.get_notetype(input.into())?.ok_or(AnkiError::NotFound)?;
let nt = col.get_notetype(ntid)?.or_not_found(ntid)?;
Ok(nt.new_note().into())
})
}
fn add_note(&self, input: pb::AddNoteRequest) -> Result<pb::AddNoteResponse> {
self.with_col(|col| {
let mut note: Note = input.note.ok_or(AnkiError::NotFound)?.into();
let mut note: Note = input.note.or_invalid("no note provided")?.into();
let changes = col.add_note(&mut note, DeckId(input.deck_id))?;
Ok(pb::AddNoteResponse {
note_id: note.id.0,
@ -62,12 +63,8 @@ impl NotesService for Backend {
}
fn get_note(&self, input: pb::NoteId) -> Result<pb::Note> {
self.with_col(|col| {
col.storage
.get_note(input.into())?
.ok_or(AnkiError::NotFound)
.map(Into::into)
})
let nid = input.into();
self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into))
}
fn remove_notes(&self, input: pb::RemoveNotesRequest) -> Result<pb::OpChangesWithCount> {

View file

@ -80,21 +80,20 @@ impl NotetypesService for Backend {
}
fn get_notetype(&self, input: pb::NotetypeId) -> Result<pb::Notetype> {
let ntid = input.into();
self.with_col(|col| {
col.storage
.get_notetype(input.into())?
.ok_or(AnkiError::NotFound)
.get_notetype(ntid)?
.or_not_found(ntid)
.map(Into::into)
})
}
fn get_notetype_legacy(&self, input: pb::NotetypeId) -> Result<pb::Json> {
let ntid = input.into();
self.with_col(|col| {
let schema11: NotetypeSchema11 = col
.storage
.get_notetype(input.into())?
.ok_or(AnkiError::NotFound)?
.into();
let schema11: NotetypeSchema11 =
col.storage.get_notetype(ntid)?.or_not_found(ntid)?.into();
Ok(serde_json::to_vec(&schema11)?).map(Into::into)
})
}
@ -131,7 +130,7 @@ impl NotetypesService for Backend {
self.with_col(|col| {
col.storage
.get_notetype_id(&input.val)
.and_then(|nt| nt.ok_or(AnkiError::NotFound))
.and_then(|nt| nt.or_not_found(input.val))
.map(|ntid| pb::NotetypeId { ntid: ntid.0 })
})
}

View file

@ -70,7 +70,7 @@ impl TryFrom<pb::SearchNode> for Node {
Filter::Negated(term) => Node::try_from(*term)?.negated(),
Filter::Group(mut group) => {
match group.nodes.len() {
0 => return Err(AnkiError::invalid_input("empty group")),
0 => invalid_input!("empty group"),
// a group of 1 doesn't need to be a group
1 => group.nodes.pop().unwrap().try_into()?,
// 2+ nodes

View file

@ -236,7 +236,7 @@ impl Collection {
self.state
.active_browser_columns
.as_ref()
.ok_or_else(|| AnkiError::invalid_input("Active browser columns not set."))?,
.or_invalid("Active browser columns not set.")?,
);
RowContext::new(self, id, notes_mode, card_render_required(&columns))?.browser_row(&columns)
}
@ -250,7 +250,7 @@ impl Collection {
} else {
self.storage.get_note_without_fields(id)?
}
.ok_or(AnkiError::NotFound)
.or_not_found(id)
}
}
@ -264,7 +264,7 @@ impl RenderContext {
question: rendered_nodes_to_str(&render.qnodes),
answer_nodes: render.anodes,
},
Err(err) => RenderContext::Err(err.localized_description(&col.tr)),
Err(err) => RenderContext::Err(err.message(&col.tr)),
}
}
@ -312,12 +312,9 @@ impl RowContext {
if notes_mode {
note = col
.get_note_maybe_with_fields(NoteId(id), with_card_render)
.map_err(|e| {
if e == AnkiError::NotFound {
AnkiError::Deleted
} else {
e
}
.map_err(|e| match e {
AnkiError::NotFound { .. } => AnkiError::Deleted,
_ => e,
})?;
cards = col.storage.all_cards_of_note(note.id)?;
if cards.is_empty() {
@ -332,12 +329,14 @@ impl RowContext {
}
let notetype = col
.get_notetype(note.notetype_id)?
.ok_or(AnkiError::NotFound)?;
let deck = col.get_deck(cards[0].deck_id)?.ok_or(AnkiError::NotFound)?;
.or_not_found(note.notetype_id)?;
let deck = col
.get_deck(cards[0].deck_id)?
.or_not_found(cards[0].deck_id)?;
let original_deck = if cards[0].original_deck_id.0 != 0 {
Some(
col.get_deck(cards[0].original_deck_id)?
.ok_or(AnkiError::NotFound)?,
.or_not_found(cards[0].original_deck_id)?,
)
} else {
None

View file

@ -190,7 +190,7 @@ impl Collection {
if undoable {
self.transact(Op::UpdateCard, |col| {
for mut card in cards {
let existing = col.storage.get_card(card.id)?.ok_or(AnkiError::NotFound)?;
let existing = col.storage.get_card(card.id)?.or_not_found(card.id)?;
col.update_card_inner(&mut card, existing, col.usn()?)?
}
Ok(())
@ -198,7 +198,7 @@ impl Collection {
} else {
self.transact_no_undo(|col| {
for mut card in cards {
let existing = col.storage.get_card(card.id)?.ok_or(AnkiError::NotFound)?;
let existing = col.storage.get_card(card.id)?.or_not_found(card.id)?;
col.update_card_inner(&mut card, existing, col.usn()?)?;
}
Ok(OpOutput {
@ -220,10 +220,7 @@ impl Collection {
where
F: FnOnce(&mut Card) -> Result<T>,
{
let orig = self
.storage
.get_card(cid)?
.ok_or_else(|| AnkiError::invalid_input("no such card"))?;
let orig = self.storage.get_card(cid)?.or_invalid("no such card")?;
let mut card = orig.clone();
func(&mut card)?;
self.update_card_inner(&mut card, orig, self.usn()?)?;
@ -242,9 +239,7 @@ impl Collection {
}
pub(crate) fn add_card(&mut self, card: &mut Card) -> Result<()> {
if card.id.0 != 0 {
return Err(AnkiError::invalid_input("card id already set"));
}
require!(card.id.0 == 0, "card id already set");
card.mtime = TimestampSecs::now();
card.usn = self.usn()?;
self.add_card_undoable(card)
@ -271,10 +266,10 @@ impl Collection {
}
pub fn set_deck(&mut self, cards: &[CardId], deck_id: DeckId) -> Result<OpOutput<usize>> {
let deck = self.get_deck(deck_id)?.ok_or(AnkiError::NotFound)?;
let config_id = deck.config_id().ok_or(AnkiError::FilteredDeckError(
FilteredDeckError::CanNotMoveCardsInto,
))?;
let deck = self.get_deck(deck_id)?.or_not_found(deck_id)?;
let config_id = deck.config_id().ok_or(AnkiError::FilteredDeckError {
source: FilteredDeckError::CanNotMoveCardsInto,
})?;
let config = self.get_deck_config(config_id, true)?.unwrap();
let mut steps_adjuster = RemainingStepsAdjuster::new(&config);
let sched = self.scheduler_version();
@ -296,9 +291,7 @@ impl Collection {
}
pub fn set_card_flag(&mut self, cards: &[CardId], flag: u32) -> Result<OpOutput<usize>> {
if flag > 7 {
return Err(AnkiError::invalid_input("invalid flag"));
}
require!(flag < 8, "invalid flag");
let flag = flag as u8;
let usn = self.usn()?;

View file

@ -20,7 +20,7 @@ impl Collection {
let current = self
.storage
.get_card(card.id)?
.ok_or_else(|| AnkiError::invalid_input("card disappeared"))?;
.or_invalid("card disappeared")?;
self.update_card_undoable(&mut *card, current)
}
UndoableCardChange::Removed(card) => self.restore_deleted_card(*card),
@ -44,9 +44,7 @@ impl Collection {
}
pub(super) fn update_card_undoable(&mut self, card: &mut Card, original: Card) -> Result<()> {
if card.id.0 == 0 {
return Err(AnkiError::invalid_input("card id not set"));
}
require!(card.id.0 != 0, "card id not set");
self.save_undo(UndoableCardChange::Updated(Box::new(original)));
self.storage.update_card(card)
}

View file

@ -14,7 +14,8 @@ use itertools::Itertools;
use log::error;
use crate::{
import_export::package::export_colpkg_from_data, log, pb::preferences::BackupLimits, prelude::*,
import_export::package::export_colpkg_from_data, io::read_file, log,
pb::preferences::BackupLimits, prelude::*,
};
const BACKUP_FORMAT_STRING: &str = "backup-%Y-%m-%d-%H.%M.%S.colpkg";
@ -37,7 +38,7 @@ impl Collection {
let log = self.log.clone();
let tr = self.tr.clone();
self.storage.checkpoint()?;
let col_data = std::fs::read(&self.col_path)?;
let col_data = read_file(&self.col_path)?;
self.update_last_backup_timestamp()?;
Ok(Some(thread::spawn(move || {
backup_inner(&col_data, &backup_folder, limits, log, &tr)

View file

@ -23,7 +23,7 @@ impl Collection {
card_ordinal: usize,
key: &str,
) -> Result<String> {
let nt = self.get_notetype(ntid)?.ok_or(AnkiError::NotFound)?;
let nt = self.get_notetype(ntid)?.or_not_found(ntid)?;
let ordinal = if matches!(nt.config.kind(), NotetypeKind::Cloze) {
0
} else {

View file

@ -19,7 +19,7 @@ impl Collection {
let current = self
.storage
.get_config_entry(&entry.key)?
.ok_or_else(|| AnkiError::invalid_input("config disappeared"))?;
.or_invalid("config disappeared")?;
self.update_config_entry_undoable(entry, current)
.map(|_| ())
}

View file

@ -304,10 +304,13 @@ impl Collection {
match self.storage.get_note(nid) {
Ok(note) => Ok(note.unwrap()),
Err(err) => match err {
AnkiError::DbError(DbError {
kind: DbErrorKind::Utf8,
..
}) => {
AnkiError::DbError {
source:
DbError {
kind: DbErrorKind::Utf8,
..
},
} => {
// fix note then fetch again
self.storage.fix_invalid_utf8_in_note(nid)?;
out.invalid_utf8 += 1;

View file

@ -19,14 +19,7 @@ pub use crate::pb::deck_config::{
/// Old deck config and cards table store 250% as 2500.
pub(crate) const INITIAL_EASE_FACTOR_THOUSANDS: u16 = (INITIAL_EASE_FACTOR * 1000.0) as u16;
use crate::{
collection::Collection,
define_newtype,
error::{AnkiError, Result},
scheduler::states::review::INITIAL_EASE_FACTOR,
timestamp::{TimestampMillis, TimestampSecs},
types::Usn,
};
use crate::{define_newtype, prelude::*, scheduler::states::review::INITIAL_EASE_FACTOR};
define_newtype!(DeckConfigId, i64);
@ -121,7 +114,7 @@ impl Collection {
let original = self
.storage
.get_deck_config(config.id)?
.ok_or(AnkiError::NotFound)?;
.or_not_found(config.id)?;
self.update_deck_config_inner(config, original, usn)
}
}
@ -176,13 +169,8 @@ impl Collection {
/// Remove a deck configuration. This will force a full sync.
pub(crate) fn remove_deck_config_inner(&mut self, dcid: DeckConfigId) -> Result<()> {
if dcid.0 == 1 {
return Err(AnkiError::invalid_input("can't delete default conf"));
}
let conf = self
.storage
.get_deck_config(dcid)?
.ok_or(AnkiError::NotFound)?;
require!(dcid.0 != 1, "can't delete default conf");
let conf = self.storage.get_deck_config(dcid)?.or_not_found(dcid)?;
self.set_schema_modified()?;
self.remove_deck_config_undoable(conf)
}

View file

@ -22,7 +22,7 @@ impl Collection {
let current = self
.storage
.get_deck_config(config.id)?
.ok_or_else(|| AnkiError::invalid_input("deck config disappeared"))?;
.or_invalid("deck config disappeared")?;
self.update_deck_config_undoable(&config, current)
}
UndoableDeckConfigChange::Removed(config) => self.restore_deleted_deck_config(*config),

View file

@ -88,7 +88,7 @@ impl Collection {
}
fn get_current_deck_for_update(&mut self, deck: DeckId) -> Result<CurrentDeck> {
let deck = self.get_deck(deck)?.ok_or(AnkiError::NotFound)?;
let deck = self.get_deck(deck)?.or_not_found(deck)?;
let normal = deck.normal()?;
let today = self.timing_today()?.days_elapsed;
@ -119,9 +119,7 @@ impl Collection {
}
fn update_deck_configs_inner(&mut self, mut input: UpdateDeckConfigsRequest) -> Result<()> {
if input.configs.is_empty() {
return Err(AnkiError::invalid_input("config not provided"));
}
require!(!input.configs.is_empty(), "config not provided");
let configs_before_update = self.storage.get_deck_config_map()?;
let mut configs_after_update = configs_before_update.clone();
@ -142,7 +140,7 @@ impl Collection {
let deck = self
.storage
.get_deck(input.target_deck_id)?
.ok_or(AnkiError::NotFound)?;
.or_not_found(input.target_deck_id)?;
self.storage
.child_decks(&deck)?
.iter()

View file

@ -14,7 +14,7 @@ impl Collection {
pub fn update_deck(&mut self, deck: &mut Deck) -> Result<OpOutput<()>> {
self.transact(Op::UpdateDeck, |col| {
let existing_deck = col.storage.get_deck(deck.id)?.ok_or(AnkiError::NotFound)?;
let existing_deck = col.storage.get_deck(deck.id)?.or_not_found(deck.id)?;
col.update_deck_inner(deck, existing_deck, col.usn()?)
})
}
@ -43,9 +43,7 @@ impl Collection {
}
pub(crate) fn add_deck_inner(&mut self, deck: &mut Deck, usn: Usn) -> Result<()> {
if deck.id.0 != 0 {
return Err(AnkiError::invalid_input("deck to add must have id 0"));
}
require!(deck.id.0 == 0, "deck to add must have id 0");
self.prepare_deck_for_update(deck, usn)?;
deck.set_modified(usn);
self.match_or_create_parents(deck, usn)?;
@ -153,9 +151,7 @@ impl Collection {
machine_name: &str,
recursion_level: usize,
) -> Result<Option<Deck>> {
if recursion_level > 10 {
return Err(AnkiError::invalid_input("deck nesting level too deep"));
}
require!(recursion_level < 11, "deck nesting level too deep");
if let Some(parent_name) = immediate_parent_name(machine_name) {
if let Some(parent_did) = self.storage.get_deck_id(parent_name)? {
self.storage.get_deck(parent_did)

View file

@ -44,7 +44,7 @@ impl Collection {
did: DeckId,
) -> Result<pb::CountsForDeckTodayResponse> {
let today = self.current_due_day(0)?;
let mut deck = self.storage.get_deck(did)?.ok_or(AnkiError::NotFound)?;
let mut deck = self.storage.get_deck(did)?.or_not_found(did)?;
deck.reset_stats_if_day_changed(today);
Ok(pb::CountsForDeckTodayResponse {
new: deck.common.new_studied,

View file

@ -16,7 +16,7 @@ impl Collection {
if let Some(deck) = self.get_deck(self.get_current_deck_id())? {
return Ok(deck);
}
self.get_deck(DeckId(1))?.ok_or(AnkiError::NotFound)
self.get_deck(DeckId(1))?.or_not_found(DeckId(1))
}
}

View file

@ -91,19 +91,17 @@ impl Deck {
#[allow(dead_code)]
pub(crate) fn normal(&self) -> Result<&NormalDeck> {
if let DeckKind::Normal(normal) = &self.kind {
Ok(normal)
} else {
Err(AnkiError::invalid_input("deck not normal"))
match &self.kind {
DeckKind::Normal(normal) => Ok(normal),
_ => invalid_input!("deck not normal"),
}
}
#[allow(dead_code)]
pub(crate) fn normal_mut(&mut self) -> Result<&mut NormalDeck> {
if let DeckKind::Normal(normal) = &mut self.kind {
Ok(normal)
} else {
Err(AnkiError::invalid_input("deck not normal"))
match &mut self.kind {
DeckKind::Normal(normal) => Ok(normal),
_ => invalid_input!("deck not normal"),
}
}

View file

@ -111,7 +111,7 @@ impl Collection {
pub fn rename_deck(&mut self, did: DeckId, new_human_name: &str) -> Result<OpOutput<()>> {
self.transact(Op::RenameDeck, |col| {
let existing_deck = col.storage.get_deck(did)?.ok_or(AnkiError::NotFound)?;
let existing_deck = col.storage.get_deck(did)?.or_not_found(did)?;
let mut deck = existing_deck.clone();
deck.name = NativeDeckName::from_human_name(new_human_name);
col.update_deck_inner(&mut deck, existing_deck, col.usn()?)

View file

@ -21,7 +21,7 @@ impl Collection {
let current = self
.storage
.get_deck(deck.id)?
.ok_or_else(|| AnkiError::invalid_input("deck disappeared"))?;
.or_invalid("deck disappeared")?;
self.update_single_deck_undoable(&mut *deck, current)
}
UndoableDeckChange::Removed(deck) => self.restore_deleted_deck(*deck),

View file

@ -5,10 +5,11 @@ use std::str::Utf8Error;
use anki_i18n::I18n;
use rusqlite::{types::FromSqlError, Error};
use snafu::Snafu;
use super::AnkiError;
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub struct DbError {
pub info: String,
pub kind: DbErrorKind,
@ -27,10 +28,12 @@ pub enum DbErrorKind {
impl AnkiError {
pub(crate) fn db_error(info: impl Into<String>, kind: DbErrorKind) -> Self {
AnkiError::DbError(DbError {
info: info.into(),
kind,
})
AnkiError::DbError {
source: DbError {
info: info.into(),
kind,
},
}
}
}
@ -38,19 +41,25 @@ impl From<Error> for AnkiError {
fn from(err: Error) -> Self {
if let Error::SqliteFailure(error, Some(reason)) = &err {
if error.code == rusqlite::ErrorCode::DatabaseBusy {
return AnkiError::DbError(DbError {
info: "".to_string(),
kind: DbErrorKind::Locked,
});
return AnkiError::DbError {
source: DbError {
info: "".to_string(),
kind: DbErrorKind::Locked,
},
};
}
if reason.contains("regex parse error") {
return AnkiError::InvalidRegex(reason.to_owned());
return AnkiError::InvalidRegex {
info: reason.to_owned(),
};
}
}
AnkiError::DbError(DbError {
info: format!("{:?}", err),
kind: DbErrorKind::Other,
})
AnkiError::DbError {
source: DbError {
info: format!("{:?}", err),
kind: DbErrorKind::Other,
},
}
}
}
@ -58,21 +67,25 @@ impl From<FromSqlError> for AnkiError {
fn from(err: FromSqlError) -> Self {
if let FromSqlError::Other(ref err) = err {
if let Some(_err) = err.downcast_ref::<Utf8Error>() {
return AnkiError::DbError(DbError {
info: "".to_string(),
kind: DbErrorKind::Utf8,
});
return AnkiError::DbError {
source: DbError {
info: "".to_string(),
kind: DbErrorKind::Utf8,
},
};
}
}
AnkiError::DbError(DbError {
info: format!("{:?}", err),
kind: DbErrorKind::Other,
})
AnkiError::DbError {
source: DbError {
info: format!("{:?}", err),
kind: DbErrorKind::Other,
},
}
}
}
impl DbError {
pub fn localized_description(&self, _tr: &I18n) -> String {
pub fn message(&self, _tr: &I18n) -> String {
match self.kind {
DbErrorKind::Corrupt => self.info.clone(),
// fixme: i18n

View file

@ -0,0 +1,87 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::path::PathBuf;
use snafu::Snafu;
/// Wrapper for [std::io::Error] with additional information on the attempted
/// operation.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub struct FileIoError {
pub path: PathBuf,
pub op: FileOp,
pub source: std::io::Error,
}
impl PartialEq for FileIoError {
fn eq(&self, other: &Self) -> bool {
self.path == other.path && self.op == other.op
}
}
impl Eq for FileIoError {}
#[derive(Debug, PartialEq, Clone, Eq)]
pub enum FileOp {
Read,
Open,
Create,
Write,
CopyFrom(PathBuf),
Persist,
Sync,
/// For legacy errors without any context.
Unknown,
}
impl FileOp {
pub fn copy(from: impl Into<PathBuf>) -> Self {
Self::CopyFrom(from.into())
}
}
impl FileIoError {
pub fn message(&self) -> String {
format!(
"Failed to {} '{}':<br>{}",
match &self.op {
FileOp::Unknown => return format!("{}", self.source),
FileOp::Open => "open".into(),
FileOp::Read => "read".into(),
FileOp::Create => "create file in".into(),
FileOp::Write => "write".into(),
FileOp::CopyFrom(p) => format!("copy from '{}' to", p.to_string_lossy()),
FileOp::Persist => "persist".into(),
FileOp::Sync => "sync".into(),
},
self.path.to_string_lossy(),
self.source
)
}
pub(crate) fn is_not_found(&self) -> bool {
self.source.kind() == std::io::ErrorKind::NotFound
}
}
impl From<tempfile::PathPersistError> for FileIoError {
fn from(err: tempfile::PathPersistError) -> Self {
FileIoError {
path: err.path.to_path_buf(),
op: FileOp::Persist,
source: err.error,
}
}
}
impl From<tempfile::PersistError> for FileIoError {
fn from(err: tempfile::PersistError) -> Self {
FileIoError {
path: err.file.path().into(),
op: FileOp::Persist,
source: err.error,
}
}
}

View file

@ -2,10 +2,9 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_i18n::I18n;
use snafu::Snafu;
use super::AnkiError;
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub enum FilteredDeckError {
MustBeLeafNode,
CanNotMoveCardsInto,
@ -14,7 +13,7 @@ pub enum FilteredDeckError {
}
impl FilteredDeckError {
pub fn localized_description(&self, tr: &I18n) -> String {
pub fn message(&self, tr: &I18n) -> String {
match self {
FilteredDeckError::MustBeLeafNode => tr.errors_filtered_parent_deck(),
FilteredDeckError::CanNotMoveCardsInto => {
@ -27,20 +26,14 @@ impl FilteredDeckError {
}
}
impl From<FilteredDeckError> for AnkiError {
fn from(e: FilteredDeckError) -> Self {
AnkiError::FilteredDeckError(e)
}
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub enum CustomStudyError {
NoMatchingCards,
ExistingDeck,
}
impl CustomStudyError {
pub fn localized_description(&self, tr: &I18n) -> String {
pub fn message(&self, tr: &I18n) -> String {
match self {
Self::NoMatchingCards => tr.custom_study_no_cards_matched_the_criteria_you(),
Self::ExistingDeck => tr.custom_study_must_rename_deck(),
@ -48,9 +41,3 @@ impl CustomStudyError {
.into()
}
}
impl From<CustomStudyError> for AnkiError {
fn from(e: CustomStudyError) -> Self {
AnkiError::CustomStudyError(e)
}
}

View file

@ -0,0 +1,99 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use snafu::{Backtrace, OptionExt, ResultExt, Snafu};
use crate::prelude::*;
/// General-purpose error for unexpected [Err]s, [None]s, and other
/// violated constraints.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub), display("{message}"), whatever)]
pub struct InvalidInputError {
pub message: String,
#[snafu(source(from(Box<dyn std::error::Error + Send + Sync>, Some)))]
pub source: Option<Box<dyn std::error::Error + Send + Sync>>,
pub backtrace: Option<Backtrace>,
}
impl InvalidInputError {
pub fn message(&self) -> String {
self.message.clone()
}
pub fn context(&self) -> String {
if let Some(source) = &self.source {
format!("{}", source)
} else {
String::new()
}
}
}
impl PartialEq for InvalidInputError {
fn eq(&self, other: &Self) -> bool {
self.message == other.message
}
}
impl Eq for InvalidInputError {}
/// Allows generating [AnkiError::InvalidInput] from [Option::None] and the
/// typical [core::result::Result::Err].
pub trait OrInvalid {
type Value;
fn or_invalid(self, message: impl Into<String>) -> Result<Self::Value>;
}
impl<T> OrInvalid for Option<T> {
type Value = T;
fn or_invalid(self, message: impl Into<String>) -> Result<T> {
self.whatever_context::<_, InvalidInputError>(message)
.map_err(Into::into)
}
}
impl<T, E: std::error::Error + Send + Sync + 'static> OrInvalid for Result<T, E> {
type Value = T;
fn or_invalid(self, message: impl Into<String>) -> Result<T> {
self.whatever_context::<_, InvalidInputError>(message)
.map_err(Into::into)
}
}
/// Returns an [AnkiError::InvalidInput] with the provided format string and an
/// optional underlying error.
#[macro_export]
macro_rules! invalid_input {
($fmt:literal$(, $($arg:expr),* $(,)?)?) => {
return core::result::Result::Err({ $crate::error::AnkiError::InvalidInput {
source: snafu::FromString::without_source(
format!($fmt$(, $($arg),*)*),
)
}})
};
($source:expr, $fmt:literal$(, $($arg:expr),* $(,)?)?) => {
return core::result::Result::Err({ $crate::error::AnkiError::InvalidInput {
source: snafu::FromString::with_source(
core::convert::Into::into($source),
format!($fmt$(, $($arg),*)*),
)
}})
};
}
/// Returns an [AnkiError::InvalidInput] unless the condition is true.
#[macro_export]
macro_rules! require {
($condition:expr, $fmt:literal$(, $($arg:expr),* $(,)?)?) => {
if !$condition {
return core::result::Result::Err({ $crate::error::AnkiError::InvalidInput {
source: snafu::FromString::without_source(
format!($fmt$(, $($arg),*)*),
)
}});
}
};
}

View file

@ -2,133 +2,161 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod db;
mod file_io;
mod filtered;
mod invalid_input;
mod network;
mod not_found;
mod search;
use std::{fmt::Display, io, path::Path};
pub use db::{DbError, DbErrorKind};
pub use filtered::{CustomStudyError, FilteredDeckError};
pub use network::{NetworkError, NetworkErrorKind, SyncError, SyncErrorKind};
pub use search::{ParseError, SearchErrorKind};
use tempfile::PathPersistError;
use snafu::Snafu;
pub use self::{
file_io::{FileIoError, FileIoSnafu, FileOp},
invalid_input::{InvalidInputError, OrInvalid},
not_found::{NotFoundError, OrNotFound},
};
use crate::{i18n::I18n, links::HelpPage};
pub type Result<T, E = AnkiError> = std::result::Result<T, E>;
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub enum AnkiError {
InvalidInput(String),
TemplateError(String),
CardTypeError(CardTypeError),
IoError(String),
FileIoError(FileIoError),
DbError(DbError),
NetworkError(NetworkError),
SyncError(SyncError),
JsonError(String),
ProtoError(String),
#[snafu(context(false))]
InvalidInput {
source: InvalidInputError,
},
TemplateError {
info: String,
},
#[snafu(context(false))]
CardTypeError {
source: CardTypeError,
},
#[snafu(context(false))]
FileIoError {
source: FileIoError,
},
#[snafu(context(false))]
DbError {
source: DbError,
},
#[snafu(context(false))]
NetworkError {
source: NetworkError,
},
#[snafu(context(false))]
SyncError {
source: SyncError,
},
JsonError {
info: String,
},
ProtoError {
info: String,
},
ParseNumError,
Interrupted,
CollectionNotOpen,
CollectionAlreadyOpen,
NotFound,
#[snafu(context(false))]
NotFound {
source: NotFoundError,
},
/// Indicates an absent card or note, but (unlike [AnkiError::NotFound]) in
/// a non-critical context like the browser table, where deleted ids are
/// deliberately not removed.
Deleted,
Existing,
FilteredDeckError(FilteredDeckError),
SearchError(SearchErrorKind),
InvalidRegex(String),
#[snafu(context(false))]
FilteredDeckError {
source: FilteredDeckError,
},
#[snafu(context(false))]
SearchError {
source: SearchErrorKind,
},
InvalidRegex {
info: String,
},
UndoEmpty,
MultipleNotetypesSelected,
DatabaseCheckRequired,
MediaCheckRequired,
CustomStudyError(CustomStudyError),
ImportError(ImportError),
#[snafu(context(false))]
CustomStudyError {
source: CustomStudyError,
},
#[snafu(context(false))]
ImportError {
source: ImportError,
},
InvalidId,
}
impl std::error::Error for AnkiError {}
impl Display for AnkiError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}
// error helpers
impl AnkiError {
pub(crate) fn invalid_input<S: Into<String>>(s: S) -> AnkiError {
AnkiError::InvalidInput(s.into())
}
pub fn localized_description(&self, tr: &I18n) -> String {
pub fn message(&self, tr: &I18n) -> String {
match self {
AnkiError::SyncError(err) => err.localized_description(tr),
AnkiError::NetworkError(err) => err.localized_description(tr),
AnkiError::TemplateError(info) => {
AnkiError::SyncError { source } => source.message(tr),
AnkiError::NetworkError { source } => source.message(tr),
AnkiError::TemplateError { info: source } => {
// already localized
info.into()
source.into()
}
AnkiError::CardTypeError(err) => {
AnkiError::CardTypeError { source } => {
let header =
tr.card_templates_invalid_template_number(err.ordinal + 1, &err.notetype);
let details = match err.details {
CardTypeErrorDetails::TemplateError | CardTypeErrorDetails::NoSuchField => {
tr.card_templates_see_preview()
}
tr.card_templates_invalid_template_number(source.ordinal + 1, &source.notetype);
let details = match source.source {
CardTypeErrorDetails::TemplateParseError
| CardTypeErrorDetails::NoSuchField => tr.card_templates_see_preview(),
CardTypeErrorDetails::NoFrontField => tr.card_templates_no_front_field(),
CardTypeErrorDetails::Duplicate(i) => tr.card_templates_identical_front(i + 1),
CardTypeErrorDetails::Duplicate { index } => {
tr.card_templates_identical_front(index + 1)
}
CardTypeErrorDetails::MissingCloze => tr.card_templates_missing_cloze(),
CardTypeErrorDetails::ExtraneousCloze => tr.card_templates_extraneous_cloze(),
};
format!("{}<br>{}", header, details)
}
AnkiError::DbError(err) => err.localized_description(tr),
AnkiError::SearchError(kind) => kind.localized_description(tr),
AnkiError::InvalidInput(info) => {
if info.is_empty() {
tr.errors_invalid_input_empty().into()
} else {
tr.errors_invalid_input_details(info.as_str()).into()
}
}
AnkiError::DbError { source } => source.message(tr),
AnkiError::SearchError { source } => source.message(tr),
AnkiError::ParseNumError => tr.errors_parse_number_fail().into(),
AnkiError::FilteredDeckError(err) => err.localized_description(tr),
AnkiError::InvalidRegex(err) => format!("<pre>{}</pre>", err),
AnkiError::FilteredDeckError { source } => source.message(tr),
AnkiError::InvalidRegex { info: source } => format!("<pre>{}</pre>", source),
AnkiError::MultipleNotetypesSelected => tr.errors_multiple_notetypes_selected().into(),
AnkiError::DatabaseCheckRequired => tr.errors_please_check_database().into(),
AnkiError::MediaCheckRequired => tr.errors_please_check_media().into(),
AnkiError::CustomStudyError(err) => err.localized_description(tr),
AnkiError::ImportError(err) => err.localized_description(tr),
AnkiError::CustomStudyError { source } => source.message(tr),
AnkiError::ImportError { source } => source.message(tr),
AnkiError::Deleted => tr.browsing_row_deleted().into(),
AnkiError::InvalidId => tr.errors_invalid_ids().into(),
AnkiError::IoError(_)
| AnkiError::JsonError(_)
| AnkiError::ProtoError(_)
AnkiError::JsonError { .. }
| AnkiError::ProtoError { .. }
| AnkiError::Interrupted
| AnkiError::CollectionNotOpen
| AnkiError::CollectionAlreadyOpen
| AnkiError::NotFound
| AnkiError::Existing
| AnkiError::UndoEmpty => format!("{:?}", self),
AnkiError::FileIoError(err) => {
format!("{}: {}", err.path, err.error)
}
AnkiError::FileIoError { source } => source.message(),
AnkiError::InvalidInput { source } => source.message(),
AnkiError::NotFound { source } => source.message(tr),
}
}
pub fn help_page(&self) -> Option<HelpPage> {
match self {
Self::CardTypeError(CardTypeError { details, .. }) => Some(match details {
CardTypeErrorDetails::TemplateError | CardTypeErrorDetails::NoSuchField => {
Self::CardTypeError {
source: CardTypeError { source, .. },
} => Some(match source {
CardTypeErrorDetails::TemplateParseError | CardTypeErrorDetails::NoSuchField => {
HelpPage::CardTypeTemplateError
}
CardTypeErrorDetails::Duplicate(_) => HelpPage::CardTypeDuplicate,
CardTypeErrorDetails::Duplicate { .. } => HelpPage::CardTypeDuplicate,
CardTypeErrorDetails::NoFrontField => HelpPage::CardTypeNoFrontField,
CardTypeErrorDetails::MissingCloze => HelpPage::CardTypeMissingCloze,
CardTypeErrorDetails::ExtraneousCloze => HelpPage::CardTypeExtraneousCloze,
@ -136,6 +164,31 @@ impl AnkiError {
_ => None,
}
}
/// Extra developer-oriented context attached to certain error kinds.
/// Returns an empty string for variants that carry no such context.
pub fn context(&self) -> String {
    if let Self::InvalidInput { source } = self {
        source.context()
    } else if let Self::NotFound { source } = self {
        source.context()
    } else {
        String::new()
    }
}
/// Renders the backtrace captured by snafu for variants that record one,
/// or an empty string when no backtrace is available.
pub fn backtrace(&self) -> String {
    let captured = match self {
        Self::InvalidInput { source } => snafu::ErrorCompat::backtrace(source),
        Self::NotFound { source } => snafu::ErrorCompat::backtrace(source),
        _ => None,
    };
    captured.map(|bt| format!("{bt}")).unwrap_or_default()
}
}
#[derive(Debug, PartialEq, Eq)]
@ -153,103 +206,100 @@ pub enum TemplateError {
NoSuchConditional(String),
}
impl From<io::Error> for AnkiError {
fn from(err: io::Error) -> Self {
AnkiError::IoError(format!("{:?}", err))
}
}
impl From<serde_json::Error> for AnkiError {
fn from(err: serde_json::Error) -> Self {
AnkiError::JsonError(err.to_string())
AnkiError::JsonError {
info: err.to_string(),
}
}
}
impl From<prost::EncodeError> for AnkiError {
fn from(err: prost::EncodeError) -> Self {
AnkiError::ProtoError(err.to_string())
AnkiError::ProtoError {
info: err.to_string(),
}
}
}
impl From<prost::DecodeError> for AnkiError {
fn from(err: prost::DecodeError) -> Self {
AnkiError::ProtoError(err.to_string())
AnkiError::ProtoError {
info: err.to_string(),
}
}
}
impl From<PathPersistError> for AnkiError {
fn from(e: PathPersistError) -> Self {
AnkiError::IoError(e.to_string())
impl From<tempfile::PathPersistError> for AnkiError {
fn from(e: tempfile::PathPersistError) -> Self {
FileIoError::from(e).into()
}
}
impl From<tempfile::PersistError> for AnkiError {
fn from(e: tempfile::PersistError) -> Self {
FileIoError::from(e).into()
}
}
impl From<regex::Error> for AnkiError {
fn from(err: regex::Error) -> Self {
AnkiError::InvalidRegex(err.to_string())
AnkiError::InvalidRegex {
info: err.to_string(),
}
}
}
impl From<csv::Error> for AnkiError {
fn from(err: csv::Error) -> Self {
AnkiError::InvalidInput(err.to_string())
// stopgap; implicit mapping should be phased out in favor of manual
// context attachment
impl From<std::io::Error> for AnkiError {
fn from(source: std::io::Error) -> Self {
FileIoError {
path: std::path::PathBuf::new(),
op: FileOp::Unknown,
source,
}
.into()
}
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
#[snafu(visibility(pub))]
pub struct CardTypeError {
pub notetype: String,
pub ordinal: usize,
pub details: CardTypeErrorDetails,
pub source: CardTypeErrorDetails,
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
#[snafu(visibility(pub))]
pub enum CardTypeErrorDetails {
TemplateError,
Duplicate(usize),
TemplateParseError,
Duplicate { index: usize },
NoFrontField,
NoSuchField,
MissingCloze,
ExtraneousCloze,
}
#[derive(Debug, PartialEq, Eq, Clone)]
#[derive(Debug, PartialEq, Eq, Clone, Snafu)]
pub enum ImportError {
Corrupt,
TooNew,
MediaImportFailed(String),
MediaImportFailed { info: String },
NoFieldColumn,
}
impl ImportError {
fn localized_description(&self, tr: &I18n) -> String {
fn message(&self, tr: &I18n) -> String {
match self {
ImportError::Corrupt => tr.importing_the_provided_file_is_not_a(),
ImportError::TooNew => tr.errors_collection_too_new(),
ImportError::MediaImportFailed(err) => tr.importing_failed_to_import_media_file(err),
ImportError::MediaImportFailed { info } => {
tr.importing_failed_to_import_media_file(info)
}
ImportError::NoFieldColumn => tr.importing_file_must_contain_field_column(),
}
.into()
}
}
/// A file-related I/O failure, with the offending path and the error
/// text captured as plain strings for display.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct FileIoError {
    /// Lossy string form of the path involved in the failed operation.
    pub path: String,
    /// Human-readable description of the underlying I/O error.
    pub error: String,
}
impl AnkiError {
    /// Convenience constructor wrapping an I/O error and the path it
    /// concerns into the `FileIoError` variant.
    pub(crate) fn file_io_error<P: AsRef<Path>>(err: std::io::Error, path: P) -> Self {
        let inner = FileIoError::new(err, path.as_ref());
        Self::FileIoError(inner)
    }
}
impl FileIoError {
pub fn new(err: std::io::Error, path: &Path) -> FileIoError {
FileIoError {
path: path.to_string_lossy().to_string(),
error: err.to_string(),
}
}
}

View file

@ -3,10 +3,11 @@
use anki_i18n::I18n;
use reqwest::StatusCode;
use snafu::Snafu;
use super::AnkiError;
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub struct NetworkError {
pub info: String,
pub kind: NetworkErrorKind,
@ -20,7 +21,7 @@ pub enum NetworkErrorKind {
Other,
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub struct SyncError {
pub info: String,
pub kind: SyncErrorKind,
@ -43,10 +44,12 @@ pub enum SyncErrorKind {
impl AnkiError {
pub(crate) fn sync_error(info: impl Into<String>, kind: SyncErrorKind) -> Self {
AnkiError::SyncError(SyncError {
info: info.into(),
kind,
})
AnkiError::SyncError {
source: SyncError {
info: info.into(),
kind,
},
}
}
pub(crate) fn server_message<S: Into<String>>(msg: S) -> AnkiError {
@ -62,10 +65,12 @@ impl From<reqwest::Error> for AnkiError {
let info = str_err.replace(url, "");
if err.is_timeout() {
AnkiError::NetworkError(NetworkError {
info,
kind: NetworkErrorKind::Timeout,
})
AnkiError::NetworkError {
source: NetworkError {
info,
kind: NetworkErrorKind::Timeout,
},
}
} else if err.is_status() {
error_for_status_code(info, err.status().unwrap())
} else {
@ -77,36 +82,50 @@ impl From<reqwest::Error> for AnkiError {
fn error_for_status_code(info: String, code: StatusCode) -> AnkiError {
use reqwest::StatusCode as S;
match code {
S::PROXY_AUTHENTICATION_REQUIRED => AnkiError::NetworkError(NetworkError {
info,
kind: NetworkErrorKind::ProxyAuth,
}),
S::CONFLICT => AnkiError::SyncError(SyncError {
info,
kind: SyncErrorKind::Conflict,
}),
S::FORBIDDEN => AnkiError::SyncError(SyncError {
info,
kind: SyncErrorKind::AuthFailed,
}),
S::NOT_IMPLEMENTED => AnkiError::SyncError(SyncError {
info,
kind: SyncErrorKind::ClientTooOld,
}),
S::INTERNAL_SERVER_ERROR | S::BAD_GATEWAY | S::GATEWAY_TIMEOUT | S::SERVICE_UNAVAILABLE => {
AnkiError::SyncError(SyncError {
S::PROXY_AUTHENTICATION_REQUIRED => AnkiError::NetworkError {
source: NetworkError {
info,
kind: SyncErrorKind::ServerError,
})
kind: NetworkErrorKind::ProxyAuth,
},
},
S::CONFLICT => AnkiError::SyncError {
source: SyncError {
info,
kind: SyncErrorKind::Conflict,
},
},
S::FORBIDDEN => AnkiError::SyncError {
source: SyncError {
info,
kind: SyncErrorKind::AuthFailed,
},
},
S::NOT_IMPLEMENTED => AnkiError::SyncError {
source: SyncError {
info,
kind: SyncErrorKind::ClientTooOld,
},
},
S::INTERNAL_SERVER_ERROR | S::BAD_GATEWAY | S::GATEWAY_TIMEOUT | S::SERVICE_UNAVAILABLE => {
AnkiError::SyncError {
source: SyncError {
info,
kind: SyncErrorKind::ServerError,
},
}
}
S::BAD_REQUEST => AnkiError::SyncError(SyncError {
info,
kind: SyncErrorKind::DatabaseCheckRequired,
}),
_ => AnkiError::NetworkError(NetworkError {
info,
kind: NetworkErrorKind::Other,
}),
S::BAD_REQUEST => AnkiError::SyncError {
source: SyncError {
info,
kind: SyncErrorKind::DatabaseCheckRequired,
},
},
_ => AnkiError::NetworkError {
source: NetworkError {
info,
kind: NetworkErrorKind::Other,
},
},
}
}
@ -131,7 +150,9 @@ fn guess_reqwest_error(mut info: String) -> AnkiError {
NetworkErrorKind::Other
};
AnkiError::NetworkError(NetworkError { info, kind })
AnkiError::NetworkError {
source: NetworkError { info, kind },
}
}
impl From<zip::result::ZipError> for AnkiError {
@ -141,7 +162,7 @@ impl From<zip::result::ZipError> for AnkiError {
}
impl SyncError {
pub fn localized_description(&self, tr: &I18n) -> String {
pub fn message(&self, tr: &I18n) -> String {
match self.kind {
SyncErrorKind::ServerMessage => self.info.clone().into(),
SyncErrorKind::Other => self.info.clone().into(),
@ -160,7 +181,7 @@ impl SyncError {
}
impl NetworkError {
pub fn localized_description(&self, tr: &I18n) -> String {
pub fn message(&self, tr: &I18n) -> String {
let summary = match self.kind {
NetworkErrorKind::Offline => tr.network_offline(),
NetworkErrorKind::Timeout => tr.network_timeout(),

View file

@ -0,0 +1,75 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::{any, fmt};
use convert_case::{Case, Casing};
use snafu::{Backtrace, OptionExt, Snafu};
use crate::prelude::*;
/// Something was unexpectedly missing from the database.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub struct NotFoundError {
    /// Lowercased, unqualified name of the missing type (e.g. "card id").
    pub type_name: String,
    /// Display form of the id or key that failed to resolve.
    pub identifier: String,
    /// Captured by snafu at construction time when backtraces are enabled.
    pub backtrace: Option<Backtrace>,
}
impl NotFoundError {
    /// User-facing message; intentionally generic, as the specifics are
    /// developer-oriented and exposed via [`Self::context`] instead.
    pub fn message(&self, tr: &I18n) -> String {
        tr.errors_inconsistent_db_state().into()
    }

    /// Developer-facing description of exactly what was missing.
    pub fn context(&self) -> String {
        let mut text = String::from("No such ");
        text.push_str(&self.type_name);
        text.push_str(": '");
        text.push_str(&self.identifier);
        text.push('\'');
        text
    }
}
impl PartialEq for NotFoundError {
    // Equality deliberately ignores the backtrace, which is diagnostic-only.
    fn eq(&self, other: &Self) -> bool {
        (&self.type_name, &self.identifier) == (&other.type_name, &other.identifier)
    }
}

impl Eq for NotFoundError {}
/// Allows generating [AnkiError::NotFound] from [Option::None].
pub trait OrNotFound {
    /// The inner value yielded on success.
    type Value;
    /// Converts `None` into a `NotFound` error, recording `identifier`
    /// (e.g. the id that was looked up) for diagnostic context.
    fn or_not_found(self, identifier: impl fmt::Display) -> Result<Self::Value>;
}
impl<T> OrNotFound for Option<T> {
    type Value = T;

    /// `None` becomes a `NotFound` error naming the inner type and the
    /// identifier that was looked up; `Some` passes through unchanged.
    fn or_not_found(self, identifier: impl fmt::Display) -> Result<Self::Value> {
        let make_context = || NotFoundSnafu {
            type_name: unqualified_lowercase_type_name::<Self::Value>(),
            identifier: identifier.to_string(),
        };
        self.with_context(make_context).map_err(Into::into)
    }
}
/// Strips any module path from `T`'s type name and lowercases the
/// remainder with word separation, e.g. `crate::card::CardId` -> "card id".
fn unqualified_lowercase_type_name<T: ?Sized>() -> String {
    let full_name = any::type_name::<T>();
    let base_name = full_name.rsplit("::").next().unwrap_or_default();
    base_name.to_case(Case::Lower)
}
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_unqualified_lowercase_type_name() {
        // `CardId` is a newtype id; the module path should be stripped and
        // the camel-case name split into lowercase words.
        assert_eq!(
            unqualified_lowercase_type_name::<crate::card::CardId>(),
            "card id"
        );
    }
}

View file

@ -5,6 +5,7 @@ use std::num::ParseIntError;
use anki_i18n::I18n;
use nom::error::{ErrorKind as NomErrorKind, ParseError as NomParseError};
use snafu::Snafu;
use super::AnkiError;
@ -14,7 +15,7 @@ pub enum ParseError<'a> {
Nom(&'a str, NomErrorKind),
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Snafu)]
pub enum SearchErrorKind {
MisplacedAnd,
MisplacedOr,
@ -24,24 +25,26 @@ pub enum SearchErrorKind {
EmptyQuote,
UnclosedQuote,
MissingKey,
UnknownEscape(String),
InvalidState(String),
UnknownEscape { provided: String },
InvalidState { provided: String },
InvalidFlag,
InvalidPropProperty(String),
InvalidPropOperator(String),
InvalidPropProperty { provided: String },
InvalidPropOperator { provided: String },
InvalidNumber { provided: String, context: String },
InvalidWholeNumber { provided: String, context: String },
InvalidPositiveWholeNumber { provided: String, context: String },
InvalidNegativeWholeNumber { provided: String, context: String },
InvalidAnswerButton { provided: String, context: String },
Other(Option<String>),
Other { info: Option<String> },
}
impl From<ParseError<'_>> for AnkiError {
fn from(err: ParseError) -> Self {
match err {
ParseError::Anki(_, kind) => AnkiError::SearchError(kind),
ParseError::Nom(_, _) => AnkiError::SearchError(SearchErrorKind::Other(None)),
ParseError::Anki(_, kind) => AnkiError::SearchError { source: kind },
ParseError::Nom(_, _) => AnkiError::SearchError {
source: SearchErrorKind::Other { info: None },
},
}
}
}
@ -51,7 +54,9 @@ impl From<nom::Err<ParseError<'_>>> for AnkiError {
match err {
nom::Err::Error(e) => e.into(),
nom::Err::Failure(e) => e.into(),
nom::Err::Incomplete(_) => AnkiError::SearchError(SearchErrorKind::Other(None)),
nom::Err::Incomplete(_) => AnkiError::SearchError {
source: SearchErrorKind::Other { info: None },
},
}
}
}
@ -73,7 +78,7 @@ impl From<ParseIntError> for AnkiError {
}
impl SearchErrorKind {
pub fn localized_description(&self, tr: &I18n) -> String {
pub fn message(&self, tr: &I18n) -> String {
let reason = match self {
SearchErrorKind::MisplacedAnd => tr.search_misplaced_and(),
SearchErrorKind::MisplacedOr => tr.search_misplaced_or(),
@ -83,20 +88,22 @@ impl SearchErrorKind {
SearchErrorKind::EmptyQuote => tr.search_empty_quote(),
SearchErrorKind::UnclosedQuote => tr.search_unclosed_quote(),
SearchErrorKind::MissingKey => tr.search_missing_key(),
SearchErrorKind::UnknownEscape(ctx) => tr.search_unknown_escape(ctx.replace('`', "'")),
SearchErrorKind::InvalidState(state) => {
tr.search_invalid_argument("is:", state.replace('`', "'"))
SearchErrorKind::UnknownEscape { provided } => {
tr.search_unknown_escape(provided.replace('`', "'"))
}
SearchErrorKind::InvalidState { provided } => {
tr.search_invalid_argument("is:", provided.replace('`', "'"))
}
SearchErrorKind::InvalidFlag => tr.search_invalid_flag_2(),
SearchErrorKind::InvalidPropProperty(prop) => {
tr.search_invalid_argument("prop:", prop.replace('`', "'"))
SearchErrorKind::InvalidPropProperty { provided } => {
tr.search_invalid_argument("prop:", provided.replace('`', "'"))
}
SearchErrorKind::InvalidPropOperator(ctx) => {
tr.search_invalid_prop_operator(ctx.as_str())
SearchErrorKind::InvalidPropOperator { provided } => {
tr.search_invalid_prop_operator(provided.as_str())
}
SearchErrorKind::Other(Some(info)) => info.into(),
SearchErrorKind::Other(None) => tr.search_invalid_other(),
SearchErrorKind::Other { info: Some(info) } => info.into(),
SearchErrorKind::Other { info: None } => tr.search_invalid_other(),
SearchErrorKind::InvalidNumber { provided, context } => {
tr.search_invalid_number(context.replace('`', "'"), provided.replace('`', "'"))
}

View file

@ -258,7 +258,7 @@ impl Collection {
.map(|config_id| {
self.storage
.get_deck_config(config_id)?
.ok_or(AnkiError::NotFound)
.or_not_found(config_id)
})
.collect()
}

View file

@ -6,8 +6,6 @@ use std::{
path::{Path, PathBuf},
};
use tempfile::NamedTempFile;
use crate::{
collection::CollectionBuilder,
import_export::{
@ -18,7 +16,7 @@ use crate::{
},
ExportProgress, IncrementableProgress,
},
io::{atomic_rename, tempfile_in_parent_of},
io::{atomic_rename, new_tempfile, new_tempfile_in_parent_of},
prelude::*,
};
@ -37,12 +35,12 @@ impl Collection {
) -> Result<usize> {
let mut progress = IncrementableProgress::new(progress_fn);
progress.call(ExportProgress::File)?;
let temp_apkg = tempfile_in_parent_of(out_path.as_ref())?;
let mut temp_col = NamedTempFile::new()?;
let temp_apkg = new_tempfile_in_parent_of(out_path.as_ref())?;
let mut temp_col = new_tempfile()?;
let temp_col_path = temp_col
.path()
.to_str()
.ok_or_else(|| AnkiError::IoError("tempfile with non-unicode name".into()))?;
.or_invalid("non-unicode filename")?;
let meta = if legacy {
Meta::new_legacy()
} else {

View file

@ -143,7 +143,7 @@ impl DeckContext<'_> {
} else if let (Ok(new), Ok(old)) = (new_deck.filtered_mut(), deck.filtered()) {
*new = old.clone();
} else {
return Err(AnkiError::invalid_input("decks have different kinds"));
invalid_input!("decks have different kinds");
}
self.imported_decks.insert(deck.id, new_deck.id);
self.target_col

View file

@ -7,6 +7,7 @@ use zip::ZipArchive;
use super::Context;
use crate::{
error::{FileIoSnafu, FileOp},
import_export::{
package::{
colpkg::export::MediaCopier,
@ -122,7 +123,10 @@ impl SafeMediaEntry {
fn ensure_sha1_set(&mut self, archive: &mut ZipArchive<File>) -> Result<()> {
if self.sha1.is_none() {
let mut reader = self.fetch_file(archive)?;
self.sha1 = Some(sha1_of_reader(&mut reader)?);
self.sha1 = Some(sha1_of_reader(&mut reader).context(FileIoSnafu {
path: &self.name,
op: FileOp::Read,
})?);
}
Ok(())
}

View file

@ -6,19 +6,20 @@ mod decks;
mod media;
mod notes;
use std::{collections::HashSet, fs::File, io, path::Path};
use std::{collections::HashSet, fs::File, path::Path};
pub(crate) use notes::NoteMeta;
use rusqlite::OptionalExtension;
use tempfile::NamedTempFile;
use zip::ZipArchive;
use zstd::stream::copy_decode;
use crate::{
collection::CollectionBuilder,
error::{FileIoSnafu, FileOp},
import_export::{
gather::ExchangeData, package::Meta, ImportProgress, IncrementableProgress, NoteLog,
},
io::{new_tempfile, open_file},
media::MediaManager,
prelude::*,
search::SearchNode,
@ -40,7 +41,7 @@ impl Collection {
path: impl AsRef<Path>,
progress_fn: impl 'static + FnMut(ImportProgress, bool) -> bool,
) -> Result<OpOutput<NoteLog>> {
let file = File::open(path)?;
let file = open_file(path)?;
let archive = ZipArchive::new(file)?;
self.transact(Op::Import, |col| {
@ -134,13 +135,12 @@ impl ExchangeData {
fn collection_to_tempfile(meta: &Meta, archive: &mut ZipArchive<File>) -> Result<NamedTempFile> {
let mut zip_file = archive.by_name(meta.collection_filename())?;
let mut tempfile = NamedTempFile::new()?;
if meta.zstd_compressed() {
copy_decode(zip_file, &mut tempfile)
} else {
io::copy(&mut zip_file, &mut tempfile).map(|_| ())
}
.map_err(|err| AnkiError::file_io_error(err, tempfile.path()))?;
let mut tempfile = new_tempfile()?;
meta.copy(&mut zip_file, &mut tempfile)
.with_context(|_| FileIoSnafu {
path: tempfile.path(),
op: FileOp::copy(zip_file.name()),
})?;
Ok(tempfile)
}

View file

@ -210,16 +210,11 @@ impl<'n> NoteContext<'n> {
}
fn get_expected_notetype(&mut self, ntid: NotetypeId) -> Result<Arc<Notetype>> {
self.target_col
.get_notetype(ntid)?
.ok_or(AnkiError::NotFound)
self.target_col.get_notetype(ntid)?.or_not_found(ntid)
}
fn get_expected_note(&mut self, nid: NoteId) -> Result<Note> {
self.target_col
.storage
.get_note(nid)?
.ok_or(AnkiError::NotFound)
self.target_col.storage.get_note(nid)?.or_not_found(nid)
}
fn maybe_update_note(&mut self, note: Note, meta: NoteMeta) -> Result<()> {

View file

@ -23,7 +23,7 @@ use super::super::{MediaEntries, MediaEntry, Meta, Version};
use crate::{
collection::CollectionBuilder,
import_export::{ExportProgress, IncrementableProgress},
io::{atomic_rename, read_dir_files, tempfile_in_parent_of},
io::{atomic_rename, new_tempfile, new_tempfile_in_parent_of, open_file, read_dir_files},
media::files::filename_if_normalized,
prelude::*,
storage::SchemaVersion,
@ -45,7 +45,7 @@ impl Collection {
let mut progress = IncrementableProgress::new(progress_fn);
progress.call(ExportProgress::File)?;
let colpkg_name = out_path.as_ref();
let temp_colpkg = tempfile_in_parent_of(colpkg_name)?;
let temp_colpkg = new_tempfile_in_parent_of(colpkg_name)?;
let src_path = self.col_path.clone();
let src_media_folder = if include_media {
Some(self.media_folder.clone())
@ -67,7 +67,9 @@ impl Collection {
&tr,
&mut progress,
)?;
atomic_rename(temp_colpkg, colpkg_name, true)
atomic_rename(temp_colpkg, colpkg_name, true)?;
Ok(())
}
}
@ -113,7 +115,7 @@ fn export_collection_file(
} else {
Meta::new()
};
let mut col_file = File::open(col_path)?;
let mut col_file = open_file(col_path)?;
let col_size = col_file.metadata()?.len() as usize;
let media = if let Some(path) = media_dir {
MediaIter::from_folder(&path)?
@ -199,7 +201,7 @@ fn write_dummy_collection(zip: &mut ZipWriter<File>, tr: &I18n) -> Result<()> {
}
fn create_dummy_collection_file(tr: &I18n) -> Result<NamedTempFile> {
let tempfile = NamedTempFile::new()?;
let tempfile = new_tempfile()?;
let mut dummy_col = CollectionBuilder::new(tempfile.path()).build()?;
dummy_col.add_dummy_note(tr)?;
dummy_col
@ -290,10 +292,8 @@ fn write_media_files(
zip.start_file(index.to_string(), file_options_stored())?;
let mut file = File::open(&path)?;
let file_name = path
.file_name()
.ok_or_else(|| AnkiError::invalid_input("not a file path"))?;
let mut file = open_file(&path)?;
let file_name = path.file_name().or_invalid("not a file path")?;
let name = normalized_unicode_file_name(file_name)?;
let (size, sha1) = copier.copy(&mut file, zip)?;
@ -304,12 +304,7 @@ fn write_media_files(
}
fn normalized_unicode_file_name(filename: &OsStr) -> Result<String> {
let filename = filename.to_str().ok_or_else(|| {
AnkiError::IoError(format!(
"non-unicode file name: {}",
filename.to_string_lossy()
))
})?;
let filename = filename.to_str().or_invalid("non-unicode filename")?;
filename_if_normalized(filename)
.map(Cow::into_owned)
.ok_or(AnkiError::MediaCheckRequired)

View file

@ -12,7 +12,7 @@ use zstd::{self, stream::copy_decode};
use crate::{
collection::CollectionBuilder,
error::ImportError,
error::{FileIoSnafu, FileOp, ImportError},
import_export::{
package::{
media::{extract_media_entries, SafeMediaEntry},
@ -20,7 +20,7 @@ use crate::{
},
ImportProgress, IncrementableProgress,
},
io::{atomic_rename, tempfile_in_parent_of},
io::{atomic_rename, create_dir_all, new_tempfile_in_parent_of, open_file},
media::MediaManager,
prelude::*,
};
@ -36,9 +36,9 @@ pub fn import_colpkg(
let mut progress = IncrementableProgress::new(progress_fn);
progress.call(ImportProgress::File)?;
let col_path = PathBuf::from(target_col_path);
let mut tempfile = tempfile_in_parent_of(&col_path)?;
let mut tempfile = new_tempfile_in_parent_of(&col_path)?;
let backup_file = File::open(colpkg_path)?;
let backup_file = open_file(colpkg_path)?;
let mut archive = ZipArchive::new(backup_file)?;
let meta = Meta::from_archive(&mut archive)?;
@ -56,7 +56,9 @@ pub fn import_colpkg(
log,
)?;
atomic_rename(tempfile, &col_path, true)
atomic_rename(tempfile, &col_path, true)?;
Ok(())
}
fn check_collection_and_mod_schema(col_path: &Path) -> Result<()> {
@ -72,7 +74,9 @@ fn check_collection_and_mod_schema(col_path: &Path) -> Result<()> {
.ok()
})
.and_then(|s| (s == "ok").then_some(()))
.ok_or(AnkiError::ImportError(ImportError::Corrupt))
.ok_or(AnkiError::ImportError {
source: ImportError::Corrupt,
})
}
fn restore_media(
@ -88,7 +92,7 @@ fn restore_media(
return Ok(());
}
std::fs::create_dir_all(media_folder)?;
create_dir_all(media_folder)?;
let media_manager = MediaManager::new(media_folder, media_db)?;
let mut media_comparer = MediaComparer::new(meta, progress, &media_manager, log)?;
@ -123,16 +127,14 @@ fn maybe_restore_media_file(
}
fn restore_media_file(meta: &Meta, zip_file: &mut ZipFile, path: &Path) -> Result<()> {
let mut tempfile = tempfile_in_parent_of(path)?;
if meta.zstd_compressed() {
copy_decode(zip_file, &mut tempfile)
} else {
io::copy(zip_file, &mut tempfile).map(|_| ())
}
.map_err(|err| AnkiError::file_io_error(err, path))?;
atomic_rename(tempfile, path, false)
let mut tempfile = new_tempfile_in_parent_of(path)?;
meta.copy(zip_file, &mut tempfile)
.with_context(|_| FileIoSnafu {
path: tempfile.path(),
op: FileOp::copy(zip_file.name()),
})?;
atomic_rename(tempfile, path, false)?;
Ok(())
}
fn copy_collection(
@ -140,9 +142,12 @@ fn copy_collection(
writer: &mut impl Write,
meta: &Meta,
) -> Result<()> {
let mut file = archive
.by_name(meta.collection_filename())
.map_err(|_| AnkiError::ImportError(ImportError::Corrupt))?;
let mut file =
archive
.by_name(meta.collection_filename())
.map_err(|_| AnkiError::ImportError {
source: ImportError::Corrupt,
})?;
if !meta.zstd_compressed() {
io::copy(&mut file, writer)?;
} else {

Some files were not shown because too many files have changed in this diff Show more