Mirror of https://github.com/ankitects/anki.git, synced 2025-09-18 22:12:21 -04:00
Add must_use annotations to generated protobufs

parent 14bc02b431
commit f032242b65

9 changed files with 158 additions and 6 deletions
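
In effect, every enum and every non-empty message in the prost-generated Rust gains a #[must_use] attribute, so silently discarding a returned protobuf value becomes a compiler warning. A minimal sketch of the intended result; the message name and field are invented, and prost's usual derive attributes are omitted:

    // prost-build originally emits something like:
    //
    //     pub struct ExampleResponse {
    //         pub value: u32,
    //     }
    //
    // After the post-processing pass added in the proto_gen changes below, the
    // definition carries the attribute. Enums are always annotated; messages
    // with zero fields are skipped.
    #[must_use]
    pub struct ExampleResponse {
        pub value: u32,
    }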
Cargo.lock (generated file), 7 lines changed
@@ -180,6 +180,7 @@ dependencies = [
 name = "anki_io"
 version = "0.0.0"
 dependencies = [
+ "camino",
  "snafu",
  "tempfile",
 ]
@@ -214,10 +215,16 @@ dependencies = [
 name = "anki_proto_gen"
 version = "0.0.0"
 dependencies = [
+ "anki_io",
+ "anyhow",
+ "camino",
  "inflections",
  "itertools",
+ "once_cell",
  "prost-reflect",
  "prost-types",
+ "regex",
+ "walkdir",
 ]

 [[package]]
@@ -332,6 +332,15 @@
   "license_file": null,
   "description": "Types and traits for working with bytes"
  },
+ {
+  "name": "camino",
+  "version": "1.1.4",
+  "authors": "Without Boats <saoirse@without.boats>|Ashley Williams <ashley666ashley@gmail.com>|Steve Klabnik <steve@steveklabnik.com>|Rain <rain@sunshowers.io>",
+  "repository": "https://github.com/camino-rs/camino",
+  "license": "Apache-2.0 OR MIT",
+  "license_file": null,
+  "description": "UTF-8 paths"
+ },
  {
   "name": "cc",
   "version": "1.0.79",
@@ -2078,6 +2087,15 @@
   "license_file": null,
   "description": "Fast floating point to string conversion"
  },
+ {
+  "name": "same-file",
+  "version": "1.0.6",
+  "authors": "Andrew Gallant <jamslam@gmail.com>",
+  "repository": "https://github.com/BurntSushi/same-file",
+  "license": "MIT OR Unlicense",
+  "license_file": null,
+  "description": "A simple crate for determining whether two file paths point to the same file."
+ },
  {
   "name": "schannel",
   "version": "0.1.21",
@@ -2915,6 +2933,15 @@
   "license_file": null,
   "description": "Convert closures into wakers"
  },
+ {
+  "name": "walkdir",
+  "version": "2.3.3",
+  "authors": "Andrew Gallant <jamslam@gmail.com>",
+  "repository": "https://github.com/BurntSushi/walkdir",
+  "license": "MIT OR Unlicense",
+  "license_file": null,
+  "description": "Recursively walk a directory."
+ },
  {
   "name": "want",
   "version": "0.3.0",
@@ -9,5 +9,6 @@ rust-version.workspace = true
 description = "Utils for better I/O error reporting"

 [dependencies]
+camino.workspace = true
 snafu.workspace = true
 tempfile.workspace = true
@@ -34,6 +34,7 @@ pub enum FileOp {
     Persist,
     Sync,
     Metadata,
+    DecodeUtf8Filename,
     /// For legacy errors without any context.
     Unknown,
 }
@@ -59,6 +60,7 @@ impl FileIoError {
                 FileOp::Persist => "persist".into(),
                 FileOp::Sync => "sync".into(),
                 FileOp::Metadata => "get metadata".into(),
+                FileOp::DecodeUtf8Filename => "decode utf8 filename".into(),
             },
             self.path.to_string_lossy(),
             self.source
@@ -8,7 +8,10 @@ use std::io::Read;
 use std::io::Seek;
 use std::path::Component;
 use std::path::Path;
+use std::path::PathBuf;

+use camino::Utf8Path;
+use camino::Utf8PathBuf;
 use snafu::ResultExt;
 use tempfile::NamedTempFile;

@@ -215,7 +218,8 @@ impl Iterator for ReadDirFiles {
     }
 }

-pub fn write_file_if_changed(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result<()> {
+/// True if changed.
+pub fn write_file_if_changed(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result<bool> {
     let path = path.as_ref();
     let contents = contents.as_ref();
     let changed = {
@@ -225,8 +229,38 @@ pub fn write_file_if_changed(path: impl AsRef<Path>, contents: impl AsRef<[u8]>)
     };
     if changed {
         write_file(path, contents)?;
+        Ok(true)
+    } else {
+        Ok(false)
     }
-    Ok(())
+}
+
+pub trait ToUtf8PathBuf {
+    fn utf8(self) -> Result<Utf8PathBuf>;
+}
+
+impl ToUtf8PathBuf for PathBuf {
+    fn utf8(self) -> Result<Utf8PathBuf> {
+        Utf8PathBuf::from_path_buf(self).map_err(|path| FileIoError {
+            path,
+            op: FileOp::DecodeUtf8Filename,
+            source: std::io::Error::from(std::io::ErrorKind::InvalidData),
+        })
+    }
+}
+
+pub trait ToUtf8Path {
+    fn utf8(&self) -> Result<&Utf8Path>;
+}
+
+impl ToUtf8Path for Path {
+    fn utf8(&self) -> Result<&Utf8Path> {
+        Utf8Path::from_path(self).ok_or_else(|| FileIoError {
+            path: self.into(),
+            op: FileOp::DecodeUtf8Filename,
+            source: std::io::Error::from(std::io::ErrorKind::InvalidData),
+        })
+    }
 }

 #[cfg(test)]
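
A minimal sketch of how the new anki_io helpers might be used together. It assumes the crate exposes a Result alias at its root alongside the re-exported functions and the ToUtf8Path trait (the later imports in this commit show those re-exports); the path and contents are illustrative:

    use std::path::Path;

    use anki_io::write_file_if_changed;
    use anki_io::Result; // assumed alias for std::result::Result<T, FileIoError>
    use anki_io::ToUtf8Path;

    fn emit(out: &Path) -> Result<()> {
        // Fails with FileOp::DecodeUtf8Filename if the path is not valid UTF-8.
        let out = out.utf8()?;
        // Only rewrites the file when the contents differ; the new bool return
        // tells the caller whether anything was actually written.
        if write_file_if_changed(out, "// generated\n")? {
            println!("updated {out}");
        }
        Ok(())
    }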
@@ -8,6 +8,8 @@ use std::path::PathBuf;
 use anki_io::create_dir_all;
 use anki_io::read_file;
 use anki_io::write_file_if_changed;
+use anki_proto_gen::add_must_use_annotations;
+use anki_proto_gen::determine_if_message_is_empty;
 use anyhow::Context;
 use anyhow::Result;
 use prost_reflect::DescriptorPool;
@@ -55,7 +57,13 @@ pub fn write_rust_protos(descriptors_path: Option<PathBuf>) -> Result<DescriptorPool> {
         )?;
         write_file_if_changed(descriptors_path, &descriptors)?;
     }

     let pool = DescriptorPool::decode(descriptors.as_ref())?;
+    add_must_use_annotations(
+        &out_dir,
+        |path| path.file_name().unwrap().starts_with("anki."),
+        |path, name| determine_if_message_is_empty(&pool, path, name),
+    )?;
     Ok(pool)
 }
@@ -10,7 +10,13 @@ license.workspace = true
 rust-version.workspace = true

 [dependencies]
+anki_io.workspace = true
+anyhow.workspace = true
+camino.workspace = true
 inflections.workspace = true
 itertools.workspace = true
+once_cell.workspace = true
 prost-reflect.workspace = true
 prost-types.workspace = true
+regex.workspace = true
+walkdir.workspace = true
@@ -5,14 +5,24 @@
 //! match.

 use std::collections::HashMap;
+use std::path::PathBuf;

+use anki_io::read_to_string;
+use anki_io::write_file_if_changed;
+use anki_io::ToUtf8Path;
+use anyhow::Result;
+use camino::Utf8Path;
 use inflections::Inflect;
 use itertools::Either;
 use itertools::Itertools;
+use once_cell::sync::Lazy;
 use prost_reflect::DescriptorPool;
 use prost_reflect::MessageDescriptor;
 use prost_reflect::MethodDescriptor;
 use prost_reflect::ServiceDescriptor;
+use regex::Captures;
+use regex::Regex;
+use walkdir::WalkDir;

 /// We look for ExampleService and BackedExampleService, both of which are
 /// expected to exist (but may be empty).
@@ -150,16 +160,16 @@ impl Method {

     /// The input type, if not empty.
     pub fn input(&self) -> Option<MessageDescriptor> {
-        msg_if_empty(self.proto.input())
+        msg_if_not_empty(self.proto.input())
     }

     /// The output type, if not empty.
     pub fn output(&self) -> Option<MessageDescriptor> {
-        msg_if_empty(self.proto.output())
+        msg_if_not_empty(self.proto.output())
     }
 }

-fn msg_if_empty(msg: MessageDescriptor) -> Option<MessageDescriptor> {
+fn msg_if_not_empty(msg: MessageDescriptor) -> Option<MessageDescriptor> {
     if msg.full_name() == "anki.generic.Empty" {
         None
     } else {
@@ -199,3 +209,60 @@ impl<'a> MethodComments<'a> {
             .and_then(|s| if s.is_empty() { None } else { Some(s.into()) })
     }
 }
+
+pub fn add_must_use_annotations<P, E>(
+    out_dir: &PathBuf,
+    should_process_path: P,
+    is_empty: E,
+) -> Result<()>
+where
+    P: Fn(&Utf8Path) -> bool,
+    E: Fn(&Utf8Path, &str) -> bool,
+{
+    for file in WalkDir::new(out_dir).into_iter() {
+        let file = file?;
+        let path = file.path().utf8()?;
+        if path.file_name().unwrap().ends_with(".rs") && should_process_path(path) {
+            add_must_use_annotations_to_file(path, &is_empty)?;
+        }
+    }
+    Ok(())
+}
+
+pub fn add_must_use_annotations_to_file<E>(path: &Utf8Path, is_empty: E) -> Result<()>
+where
+    E: Fn(&Utf8Path, &str) -> bool,
+{
+    static MESSAGE_OR_ENUM_RE: Lazy<Regex> =
+        Lazy::new(|| Regex::new(r#"pub (struct|enum) ([[:alnum:]]+?)\s"#).unwrap());
+    let contents = read_to_string(path)?;
+    let contents = MESSAGE_OR_ENUM_RE.replace_all(&contents, |caps: &Captures| {
+        let is_enum = caps.get(1).unwrap().as_str() == "enum";
+        let name = caps.get(2).unwrap().as_str();
+        if is_enum || !is_empty(path, name) {
+            format!("#[must_use]\n{}", caps.get(0).unwrap().as_str())
+        } else {
+            caps.get(0).unwrap().as_str().to_string()
+        }
+    });
+    write_file_if_changed(path, contents.as_ref())?;
+    Ok(())
+}
+
+/// Given a generated prost filename and a struct name, try to determine whether
+/// the message has 0 fields.
+///
+/// This is unfortunately rather circuitous, as Prost doesn't allow us to easily
+/// alter the code generation with access to the associated proto descriptor. So
+/// we need to infer the full proto path based on the filename and the Rust type
+/// name, which we can only do for top-level elements. For any nested messages
+/// we can't find, we assume they must be used.
+pub fn determine_if_message_is_empty(pool: &DescriptorPool, path: &Utf8Path, name: &str) -> bool {
+    let package = path.file_stem().unwrap();
+    let full_name = format!("{package}.{name}");
+    if let Some(msg) = pool.get_message_by_name(&full_name) {
+        msg.fields().count() == 0
+    } else {
+        false
+    }
+}
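
A hypothetical walk-through of the lookup that determine_if_message_is_empty performs, for a generated file named anki.decks.rs containing pub struct DeckId (both names invented for illustration):

    use camino::Utf8Path;
    use prost_reflect::DescriptorPool;

    // Mirrors the inference above: the file stem supplies the proto package,
    // and the Rust type name completes the full message name.
    fn lookup_example(pool: &DescriptorPool) -> bool {
        let path = Utf8Path::new("anki.decks.rs");
        let package = path.file_stem().unwrap(); // "anki.decks"
        let full_name = format!("{package}.DeckId"); // "anki.decks.DeckId"
        // Nested messages live in generated sub-modules and won't resolve via
        // this scheme, so they are treated as non-empty and keep #[must_use].
        pool.get_message_by_name(&full_name)
            .map(|msg| msg.fields().count() == 0)
            .unwrap_or(false)
    }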
@@ -243,7 +243,7 @@ fn hide_default_deck(node: &mut DeckTreeNode) {
             // can't remove if there are no other decks
         } else {
             // safe to remove
-            node.children.remove(idx);
+            _ = node.children.remove(idx);
         }
         return;
     }
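
The discarding assignment is presumably needed because DeckTreeNode is a generated message that now carries #[must_use], and Vec::remove returns the removed element. A minimal reproduction of the pattern with a stand-in type:

    // Stand-in for the generated, now #[must_use], protobuf message.
    #[must_use]
    struct DeckTreeNode;

    fn prune(children: &mut Vec<DeckTreeNode>, idx: usize) {
        // Writing `children.remove(idx);` alone would warn about an unused
        // must_use value; assigning to `_` discards it explicitly.
        _ = children.remove(idx);
    }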