diff --git a/.github/workflows/continuous-integration-workflow.yaml b/.github/workflows/continuous-integration-workflow.yaml index b20812f2b..e2c961464 100644 --- a/.github/workflows/continuous-integration-workflow.yaml +++ b/.github/workflows/continuous-integration-workflow.yaml @@ -12,11 +12,8 @@ jobs: with: submodules: recursive - name: install toolchain - uses: actions-rs/toolchain@v1 + uses: dtolnay/rust-toolchain@stable with: - toolchain: stable - default: true - profile: minimal components: rustfmt - name: Install Protoc uses: arduino/setup-protoc@v1 @@ -25,10 +22,7 @@ jobs: - name: build b_tests run: cargo build --package b_tests - name: rustfmt - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --all -- --check + run: cargo fmt --all --check # Disabled because downstream crates don't check this as well # minversions: # runs-on: ubuntu-latest @@ -63,13 +57,29 @@ jobs: # args: --workspace --all-targets # toolchain: stable + machete: + runs-on: ubuntu-latest + steps: + - name: checkout + uses: actions/checkout@v3 + with: + submodules: recursive + - name: install toolchain + uses: dtolnay/rust-toolchain@stable + - name: Install cargo-machete + uses: baptiste0928/cargo-install@v2 + with: + crate: cargo-machete + - name: Check unused dependencies + run: cargo machete + test: runs-on: ${{ matrix.os }} strategy: matrix: toolchain: - stable - - 1.56.1 + - "1.60" os: - ubuntu-latest - macos-latest @@ -80,29 +90,55 @@ jobs: with: submodules: recursive - name: install toolchain - uses: actions-rs/toolchain@v1 + uses: dtolnay/rust-toolchain@master with: toolchain: ${{ matrix.toolchain }} - default: true - profile: minimal - name: Install Protoc uses: arduino/setup-protoc@v1 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: install ninja uses: seanmiddleditch/gha-setup-ninja@v3 - - uses: Swatinem/rust-cache@v1 + - uses: Swatinem/rust-cache@v2 - name: test - uses: actions-rs/cargo@v1 - with: - command: test - args: --workspace --all-targets + run: cargo test --workspace --all-targets - name: test no-default-features - uses: actions-rs/cargo@v1 + run: cargo test -p prost-build -p prost-derive -p prost-types --all-targets --no-default-features + # Run doc tests separately: https://github.com/rust-lang/cargo/issues/6669 + - name: test doc + run: cargo test --workspace --doc + - name: test doc + run: cargo test -p prost-build -p prost-derive -p prost-types --doc --no-default-features + + kani: + runs-on: ubuntu-latest + steps: + - name: checkout + uses: actions/checkout@v3 + - name: Verify with Kani + uses: model-checking/kani-github-action@0.23 with: - command: test - args: --no-default-features + enable-propproof: true + args: | + --tests -p prost-types --default-unwind 3 \ + --harness "tests::check_timestamp_roundtrip_via_system_time" \ + --harness "tests::check_duration_roundtrip_nanos" + # --default-unwind N roughly corresponds to how much effort + # Kani will spend trying to prove correctness of the + # program. Higher the number, more programs can be proven + # correct. However, Kani will require more time and memory. If + # Kani fails with "Failed Checks: unwinding assertion," this + # number may need to be raised for Kani to succeed. 
+ no-std: + runs-on: ubuntu-latest + steps: + - name: checkout + uses: actions/checkout@v3 + with: + submodules: recursive + - name: install toolchain + uses: dtolnay/rust-toolchain@nightly # no-std: # runs-on: ubuntu-latest # steps: @@ -120,6 +156,22 @@ jobs: uses: arduino/setup-protoc@v1 with: repo-token: ${{ secrets.GITHUB_TOKEN }} + - uses: Swatinem/rust-cache@v2 + - name: install cargo-no-std-check + uses: baptiste0928/cargo-install@v2 + with: + crate: cargo-no-std-check + - name: prost cargo-no-std-check + run: cargo no-std-check --manifest-path Cargo.toml --no-default-features + - name: prost-types cargo-no-std-check + run: cargo no-std-check --manifest-path prost-types/Cargo.toml --no-default-features + # prost-build depends on prost with --no-default-features, but when + # prost-build is built through the workspace, prost typically has default + # features enabled due to vagaries in Cargo workspace feature resolution. + # This additional check ensures that prost-build does not rely on any of + # prost's default features to compile. + - name: prost-build check + run: cargo check --manifest-path prost-build/Cargo.toml # - uses: Swatinem/rust-cache@v1 # - name: install cargo-no-std-check # uses: actions-rs/cargo@v1 diff --git a/Cargo.toml b/Cargo.toml index d5c6efbfb..73959d078 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "prost" -version = "0.11.3" +version = "0.11.8" authors = [ "Dan Burkert ", "Lucio Franco .cargo/config.toml + ``` + +**Please Note**: +- `features/proptest` branch under Kani is likely not the final + location for this code. If these instructions stop working, please + consult the Kani documentation and file an issue on [the Kani + repo](https://github.com/model-checking/kani.git). +- The cargo config file will force cargo to always use PropProof. To + use `proptest`, delete the file. + +## Running Kani +After installing Kani and PropProof, `cargo kani --tests` should +automatically run `proptest!` harnesses inside your crate. Use +`--harness` to run a specific harness, and `-p` for a specific +sub-crate. + +If Kani returns with an error, you can use the concrete playback +feature using `--enable-unstable --concrete-playback print` and paste +in the code to your repository. Running this harness with `cargo test` +will replay the input found by Kani that produced this crash. Please +note that this feature is unstable and using `--concrete-playback +inplace` to automatically inject a replay harness is not supported +when using PropProof. + +## Debugging CI Failure +```yaml + - name: Verify with Kani + uses: model-checking/kani-github-action@v0.xx + with: + enable-propproof: true + args: | + $KANI_ARGUMENTS +``` + +The above GitHub CI workflow is equivalent to `cargo kani +$KANI_ARGUMENTS` with PropProof installed. To replicate issues +locally, run `cargo kani` with the same arguments. diff --git a/README.md b/README.md index a1119c9a3..bed30d0ad 100644 --- a/README.md +++ b/README.md @@ -30,9 +30,9 @@ First, add `prost` and its public dependencies to your `Cargo.toml`: ```ignore [dependencies] -prost = "0.10" +prost = "0.11" # Only necessary if using Protobuf well-known types: -prost-types = "0.10" +prost-types = "0.11" ``` The recommended way to add `.proto` compilation to a Cargo project is to use the @@ -44,7 +44,7 @@ start-to-finish example. ### MSRV -`prost` follows the `tokio-rs` projects MSRV model and supports 1.56+. For more +`prost` follows the `tokio-rs` projects MSRV model and supports 1.60. 
For more information on the tokio msrv policy you can check it out [here][tokio msrv] [tokio msrv]: https://github.com/tokio-rs/tokio/#supported-rust-versions diff --git a/conformance/Cargo.toml b/conformance/Cargo.toml index 83249f173..acbc92976 100644 --- a/conformance/Cargo.toml +++ b/conformance/Cargo.toml @@ -11,7 +11,6 @@ edition = "2018" [dependencies] bytes = "1" env_logger = { version = "0.8", default-features = false } -log = "0.4" prost = { path = ".." } protobuf = { path = "../protobuf" } tests = { path = "../tests" } diff --git a/prost-build/Cargo.toml b/prost-build/Cargo.toml index bd3c05455..6a5c08a25 100644 --- a/prost-build/Cargo.toml +++ b/prost-build/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "prost-build" -version = "0.11.4" +version = "0.11.8" authors = [ "Dan Burkert ", "Lucio Franco ", @@ -12,7 +12,7 @@ documentation = "https://docs.rs/prost-build" readme = "README.md" description = "A Protocol Buffers implementation for the Rust Language." edition = "2021" -rust-version = "1.56" +rust-version = "1.60" [features] default = ["format"] @@ -27,15 +27,15 @@ itertools = { version = "0.10", default-features = false, features = ["use_alloc log = "0.4" multimap = { version = "0.8", default-features = false } petgraph = { version = "0.6", default-features = false } -prost = { version = "0.11.0", path = "..", default-features = false } -prost-types = { version = "0.11.0", path = "../prost-types", default-features = false } +prost = { version = "0.11.8", path = "..", default-features = false } +prost-types = { version = "0.11.8", path = "../prost-types", default-features = false } tempfile = "3" lazy_static = "1.4.0" regex = { version = "1.5.5", default-features = false, features = ["std", "unicode-bool"] } which = "4" -prettyplease = { version = "0.1", optional = true } -syn = { version = "1", features = ["full"], optional = true } +prettyplease = { version = "0.2", optional = true } +syn = { version = "2", features = ["full"], optional = true } # These two must be kept in sync, used for `cleanup-markdown` feature. pulldown-cmark = { version = "0.9.1", optional = true, default-features = false } @@ -43,4 +43,3 @@ pulldown-cmark-to-cmark = { version = "10.0.1", optional = true } [dev-dependencies] env_logger = { version = "0.8", default-features = false } - diff --git a/prost-build/src/ast.rs b/prost-build/src/ast.rs index 8a7fa2acd..7685a2e5a 100644 --- a/prost-build/src/ast.rs +++ b/prost-build/src/ast.rs @@ -5,7 +5,7 @@ use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag}; use regex::Regex; /// Comments on a Protobuf item. -#[derive(Debug, Clone)] +#[derive(Debug, Default, Clone)] pub struct Comments { /// Leading detached blocks of comments. 
pub leading_detached: Vec>, diff --git a/prost-build/src/code_generator.rs b/prost-build/src/code_generator.rs index fad8bb31c..2be1851bf 100644 --- a/prost-build/src/code_generator.rs +++ b/prost-build/src/code_generator.rs @@ -29,7 +29,7 @@ enum Syntax { pub struct CodeGenerator<'a> { config: &'a mut Config, package: String, - source_info: SourceCodeInfo, + source_info: Option, syntax: Syntax, message_graph: &'a MessageGraph, extern_paths: &'a ExternPaths, @@ -51,16 +51,14 @@ impl<'a> CodeGenerator<'a> { file: FileDescriptorProto, buf: &mut String, ) { - let mut source_info = file - .source_code_info - .expect("no source code info in request"); - source_info.location.retain(|location| { - let len = location.path.len(); - len > 0 && len % 2 == 0 + let source_info = file.source_code_info.map(|mut s| { + s.location.retain(|loc| { + let len = loc.path.len(); + len > 0 && len % 2 == 0 + }); + s.location.sort_by(|a, b| a.path.cmp(&b.path)); + s }); - source_info - .location - .sort_by_key(|location| location.path.clone()); let syntax = match file.syntax.as_ref().map(String::as_str) { None | Some("proto2") => Syntax::Proto2, @@ -182,6 +180,7 @@ impl<'a> CodeGenerator<'a> { self.append_doc(&fq_message_name, None); self.append_type_attributes(&fq_message_name); + self.append_message_attributes(&fq_message_name); self.push_indent(); self.buf .push_str("#[allow(clippy::derive_partial_eq_without_eq)]\n"); @@ -272,6 +271,24 @@ impl<'a> CodeGenerator<'a> { } } + fn append_message_attributes(&mut self, fq_message_name: &str) { + assert_eq!(b'.', fq_message_name.as_bytes()[0]); + for attribute in self.config.message_attributes.get(fq_message_name) { + push_indent(self.buf, self.depth); + self.buf.push_str(attribute); + self.buf.push('\n'); + } + } + + fn append_enum_attributes(&mut self, fq_message_name: &str) { + assert_eq!(b'.', fq_message_name.as_bytes()[0]); + for attribute in self.config.enum_attributes.get(fq_message_name) { + push_indent(self.buf, self.depth); + self.buf.push_str(attribute); + self.buf.push('\n'); + } + } + fn append_field_attributes(&mut self, fq_message_name: &str, field_name: &str) { assert_eq!(b'.', fq_message_name.as_bytes()[0]); for attribute in self @@ -293,10 +310,15 @@ impl<'a> CodeGenerator<'a> { let ty = self.resolve_type(&field, fq_message_name); let boxed = !repeated - && (type_ == Type::Message || type_ == Type::Group) - && self - .message_graph - .is_nested(field.type_name(), fq_message_name); + && ((type_ == Type::Message || type_ == Type::Group) + && self + .message_graph + .is_nested(field.type_name(), fq_message_name)) + || (self + .config + .boxed + .get_first_field(&fq_message_name, field.name()) + .is_some()); debug!( " field: {:?}, type: {:?}, boxed: {}", @@ -536,6 +558,7 @@ impl<'a> CodeGenerator<'a> { let oneof_name = format!("{}.{}", fq_message_name, oneof.name()); self.append_type_attributes(&oneof_name); + self.append_enum_attributes(&oneof_name); self.push_indent(); self.buf .push_str("#[allow(clippy::derive_partial_eq_without_eq)]\n"); @@ -569,10 +592,15 @@ impl<'a> CodeGenerator<'a> { self.push_indent(); let ty = self.resolve_type(&field, fq_message_name); - let boxed = (type_ == Type::Message || type_ == Type::Group) + let boxed = ((type_ == Type::Message || type_ == Type::Group) && self .message_graph - .is_nested(field.type_name(), fq_message_name); + .is_nested(field.type_name(), fq_message_name)) + || (self + .config + .boxed + .get_first_field(&oneof_name, field.name()) + .is_some()); debug!( " oneof: {:?}, type: {:?}, boxed: {}", @@ -599,14 
+627,13 @@ impl<'a> CodeGenerator<'a> { self.buf.push_str("}\n"); } - fn location(&self) -> &Location { - let idx = self - .source_info + fn location(&self) -> Option<&Location> { + let source_info = self.source_info.as_ref()?; + let idx = source_info .location .binary_search_by_key(&&self.path[..], |location| &location.path[..]) .unwrap(); - - &self.source_info.location[idx] + Some(&source_info.location[idx]) } fn append_doc(&mut self, fq_name: &str, field_name: Option<&str>) { @@ -619,7 +646,9 @@ impl<'a> CodeGenerator<'a> { self.config.disable_comments.get(fq_name).next().is_none() }; if append_doc { - Comments::from_location(self.location()).append_with_indent(self.depth, self.buf) + if let Some(comments) = self.location().map(Comments::from_location) { + comments.append_with_indent(self.depth, self.buf); + } } } @@ -646,6 +675,7 @@ impl<'a> CodeGenerator<'a> { self.append_doc(&fq_proto_enum_name, None); self.append_type_attributes(&fq_proto_enum_name); + self.append_enum_attributes(&fq_proto_enum_name); self.push_indent(); self.buf.push_str( &format!("#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, {}::Enumeration)]\n",self.config.prost_path.as_deref().unwrap_or("::prost")), @@ -745,7 +775,7 @@ impl<'a> CodeGenerator<'a> { for variant in variant_mappings.iter() { self.push_indent(); - self.buf.push_str("\""); + self.buf.push('\"'); self.buf.push_str(variant.proto_name); self.buf.push_str("\" => Some(Self::"); self.buf.push_str(&variant.generated_variant_name); @@ -772,7 +802,10 @@ impl<'a> CodeGenerator<'a> { let name = service.name().to_owned(); debug!(" service: {:?}", name); - let comments = Comments::from_location(self.location()); + let comments = self + .location() + .map(Comments::from_location) + .unwrap_or_default(); self.path.push(2); let methods = service @@ -781,8 +814,12 @@ impl<'a> CodeGenerator<'a> { .enumerate() .map(|(idx, mut method)| { debug!(" method: {:?}", method.name()); + self.path.push(idx as i32); - let comments = Comments::from_location(self.location()); + let comments = self + .location() + .map(Comments::from_location) + .unwrap_or_default(); self.path.pop(); let name = method.name.take().unwrap(); diff --git a/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs b/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs new file mode 100644 index 000000000..04860e63d --- /dev/null +++ b/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs @@ -0,0 +1,33 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Container { + #[prost(oneof="container::Data", tags="1, 2")] + pub data: ::core::option::Option, +} +/// Nested message and enum types in `Container`. 
+pub mod container { + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Data { + #[prost(message, tag="1")] + Foo(::prost::alloc::boxed::Box), + #[prost(message, tag="2")] + Bar(super::Bar), + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Foo { + #[prost(string, tag="1")] + pub foo: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Bar { + #[prost(message, optional, boxed, tag="1")] + pub qux: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Qux { +} diff --git a/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs b/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs new file mode 100644 index 000000000..8c329f902 --- /dev/null +++ b/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs @@ -0,0 +1,32 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Container { + #[prost(oneof = "container::Data", tags = "1, 2")] + pub data: ::core::option::Option, +} +/// Nested message and enum types in `Container`. +pub mod container { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Data { + #[prost(message, tag = "1")] + Foo(::prost::alloc::boxed::Box), + #[prost(message, tag = "2")] + Bar(super::Bar), + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Foo { + #[prost(string, tag = "1")] + pub foo: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Bar { + #[prost(message, optional, boxed, tag = "1")] + pub qux: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Qux {} diff --git a/prost-build/src/fixtures/field_attributes/field_attributes.proto b/prost-build/src/fixtures/field_attributes/field_attributes.proto new file mode 100644 index 000000000..9ef5aa89d --- /dev/null +++ b/prost-build/src/fixtures/field_attributes/field_attributes.proto @@ -0,0 +1,21 @@ +syntax = "proto3"; + +package field_attributes; + +message Container { + oneof data { + Foo foo = 1; + Bar bar = 2; + } +} + +message Foo { + string foo = 1; +} + +message Bar { + Qux qux = 1; +} + +message Qux { +} diff --git a/prost-build/src/fixtures/helloworld/_expected_helloworld.rs b/prost-build/src/fixtures/helloworld/_expected_helloworld.rs new file mode 100644 index 000000000..2f05c46ed --- /dev/null +++ b/prost-build/src/fixtures/helloworld/_expected_helloworld.rs @@ -0,0 +1,44 @@ +#[derive(derive_builder::Builder)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Message { + #[prost(string, tag="1")] + pub say: ::prost::alloc::string::String, +} +#[derive(derive_builder::Builder)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Response { + #[prost(string, tag="1")] + pub say: ::prost::alloc::string::String, +} +#[some_enum_attr(u8)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub 
enum ServingStatus { + Unknown = 0, + Serving = 1, + NotServing = 2, +} +impl ServingStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + ServingStatus::Unknown => "UNKNOWN", + ServingStatus::Serving => "SERVING", + ServingStatus::NotServing => "NOT_SERVING", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "UNKNOWN" => Some(Self::Unknown), + "SERVING" => Some(Self::Serving), + "NOT_SERVING" => Some(Self::NotServing), + _ => None, + } + } +} diff --git a/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs b/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs new file mode 100644 index 000000000..a64c4da3c --- /dev/null +++ b/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs @@ -0,0 +1,44 @@ +#[derive(derive_builder::Builder)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Message { + #[prost(string, tag = "1")] + pub say: ::prost::alloc::string::String, +} +#[derive(derive_builder::Builder)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Response { + #[prost(string, tag = "1")] + pub say: ::prost::alloc::string::String, +} +#[some_enum_attr(u8)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ServingStatus { + Unknown = 0, + Serving = 1, + NotServing = 2, +} +impl ServingStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + ServingStatus::Unknown => "UNKNOWN", + ServingStatus::Serving => "SERVING", + ServingStatus::NotServing => "NOT_SERVING", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "UNKNOWN" => Some(Self::Unknown), + "SERVING" => Some(Self::Serving), + "NOT_SERVING" => Some(Self::NotServing), + _ => None, + } + } +} diff --git a/prost-build/src/fixtures/helloworld/types.proto b/prost-build/src/fixtures/helloworld/types.proto index 4d9d5e0e2..5bf84aa67 100644 --- a/prost-build/src/fixtures/helloworld/types.proto +++ b/prost-build/src/fixtures/helloworld/types.proto @@ -9,3 +9,9 @@ message Message { message Response { string say = 1; } + +enum ServingStatus { + UNKNOWN = 0; + SERVING = 1; + NOT_SERVING = 2; +} diff --git a/prost-build/src/lib.rs b/prost-build/src/lib.rs index 384318255..edbc4aaec 100644 --- a/prost-build/src/lib.rs +++ b/prost-build/src/lib.rs @@ -1,4 +1,4 @@ -#![doc(html_root_url = "https://docs.rs/prost-build/0.11.4")] +#![doc(html_root_url = "https://docs.rs/prost-build/0.11.8")] #![allow(clippy::option_as_ref_deref, clippy::format_push_string)] //! `prost-build` compiles `.proto` files into Rust. 
@@ -250,7 +250,10 @@ pub struct Config { map_type: PathMap, bytes_type: PathMap, type_attributes: PathMap, + message_attributes: PathMap, + enum_attributes: PathMap, field_attributes: PathMap, + boxed: PathMap<()>, prost_types: bool, strip_enum_prefix: bool, out_dir: Option, @@ -495,16 +498,112 @@ impl Config { self } - pub fn types_attribute(&mut self, paths: &[P], attribute: A) -> &mut Self + /// Add additional attribute to matched messages. + /// + /// # Arguments + /// + /// **`paths`** - a path matching any number of types. It works the same way as in + /// [`btree_map`](#method.btree_map), just with the field name omitted. + /// + /// **`attribute`** - an arbitrary string to be placed before each matched type. The + /// expected usage are additional attributes, but anything is allowed. + /// + /// The calls to this method are cumulative. They don't overwrite previous calls and if a + /// type is matched by multiple calls of the method, all relevant attributes are added to + /// it. + /// + /// For things like serde it might be needed to combine with [field + /// attributes](#method.field_attribute). + /// + /// # Examples + /// + /// ```rust + /// # let mut config = prost_build::Config::new(); + /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`. + /// config.message_attribute(".", "#[derive(Eq)]"); + /// // Some messages want to be serializable with serde as well. + /// config.message_attribute("my_messages.MyMessageType", + /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); + /// config.message_attribute("my_messages.MyMessageType.MyNestedMessageType", + /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); + /// ``` + pub fn message_attribute(&mut self, path: P, attribute: A) -> &mut Self where P: AsRef, A: AsRef, { - for path in paths.iter() { - self.type_attributes - .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); - } + self.message_attributes + .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); + self + } + + /// Add additional attribute to matched enums and one-ofs. + /// + /// # Arguments + /// + /// **`paths`** - a path matching any number of types. It works the same way as in + /// [`btree_map`](#method.btree_map), just with the field name omitted. + /// + /// **`attribute`** - an arbitrary string to be placed before each matched type. The + /// expected usage are additional attributes, but anything is allowed. + /// + /// The calls to this method are cumulative. They don't overwrite previous calls and if a + /// type is matched by multiple calls of the method, all relevant attributes are added to + /// it. + /// + /// For things like serde it might be needed to combine with [field + /// attributes](#method.field_attribute). + /// + /// # Examples + /// + /// ```rust + /// # let mut config = prost_build::Config::new(); + /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`. + /// config.enum_attribute(".", "#[derive(Eq)]"); + /// // Some messages want to be serializable with serde as well. + /// config.enum_attribute("my_messages.MyEnumType", + /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); + /// config.enum_attribute("my_messages.MyMessageType.MyNestedEnumType", + /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]"); + /// ``` + /// + /// # Oneof fields + /// + /// The `oneof` fields don't have a type name of their own inside Protobuf. 
Therefore, the + /// field name can be used both with `enum_attribute` and `field_attribute` ‒ the first is + /// placed before the `enum` type definition, the other before the field inside corresponding + /// message `struct`. + /// + /// In other words, to place an attribute on the `enum` implementing the `oneof`, the match + /// would look like `my_messages.MyNestedMessageType.oneofname`. + pub fn enum_attribute(&mut self, path: P, attribute: A) -> &mut Self + where + P: AsRef, + A: AsRef, + { + self.enum_attributes + .insert(path.as_ref().to_string(), attribute.as_ref().to_string()); + self + } + /// Wrap matched fields in a `Box`. + /// + /// # Arguments + /// + /// **`path`** - a path matching any number of fields. These fields get the attribute. + /// For details about matching fields see [`btree_map`](#method.btree_map). + /// + /// # Examples + /// + /// ```rust + /// # let mut config = prost_build::Config::new(); + /// config.boxed(".my_messages.MyMessageType.my_field"); + /// ``` + pub fn boxed
<P>
(&mut self, path: P) -> &mut Self + where + P: AsRef, + { + self.boxed.insert(path.as_ref().to_string(), ()); self } @@ -704,8 +803,7 @@ impl Config { /// In combination with with `file_descriptor_set_path`, this can be used to provide a file /// descriptor set as an input file, rather than having prost-build generate the file by calling - /// protoc. Prost-build does require that the descriptor set was generated with - /// --include_source_info. + /// protoc. /// /// In `build.rs`: /// @@ -745,6 +843,7 @@ impl Config { } /// Configures what filename protobufs with no package definition are written to. + /// The filename will be appended with the `.rs` extension. pub fn default_package_filename(&mut self, filename: S) -> &mut Self where S: Into, @@ -858,6 +957,99 @@ impl Config { self } + /// Compile a [`FileDescriptorSet`] into Rust files during a Cargo build with + /// additional code generator configuration options. + /// + /// This method is like `compile_protos` function except it does not invoke `protoc` + /// and instead requires the user to supply a [`FileDescriptorSet`]. + /// + /// # Example `build.rs` + /// + /// ```rust,no_run + /// # use prost_types::FileDescriptorSet; + /// # fn fds() -> FileDescriptorSet { todo!() } + /// fn main() -> std::io::Result<()> { + /// let file_descriptor_set = fds(); + /// + /// prost_build::Config::new() + /// .compile_fds(file_descriptor_set) + /// } + /// ``` + pub fn compile_fds(&mut self, fds: FileDescriptorSet) -> Result<()> { + let mut target_is_env = false; + let target: PathBuf = self.out_dir.clone().map(Ok).unwrap_or_else(|| { + env::var_os("OUT_DIR") + .ok_or_else(|| { + Error::new(ErrorKind::Other, "OUT_DIR environment variable is not set") + }) + .map(|val| { + target_is_env = true; + Into::into(val) + }) + })?; + + let requests = fds + .file + .into_iter() + .map(|descriptor| { + ( + Module::from_protobuf_package_name(descriptor.package()), + descriptor, + ) + }) + .collect::>(); + + let file_names = requests + .iter() + .map(|req| { + ( + req.0.clone(), + req.0.to_file_name_or(&self.default_package_filename), + ) + }) + .collect::>(); + + let modules = self.generate(requests)?; + for (module, content) in &modules { + let file_name = file_names + .get(module) + .expect("every module should have a filename"); + let output_path = target.join(file_name); + + let previous_content = fs::read(&output_path); + + if previous_content + .map(|previous_content| previous_content == content.as_bytes()) + .unwrap_or(false) + { + trace!("unchanged: {:?}", file_name); + } else { + trace!("writing: {:?}", file_name); + let mut file = std::fs::File::create(output_path)?; + + for i in &self.start_file_with { + writeln!(file, "{}", i)?; + } + + writeln!(file, "{}", content)?; + } + } + + if let Some(ref include_file) = self.include_file { + trace!("Writing include file: {:?}", target.join(include_file)); + let mut file = fs::File::create(target.join(include_file))?; + self.write_includes( + modules.keys().collect(), + &mut file, + 0, + if target_is_env { None } else { Some(&target) }, + )?; + file.flush()?; + } + + Ok(()) + } + /// Compile `.proto` files into Rust files during a Cargo build with additional code generator /// configuration options. 
/// @@ -883,18 +1075,6 @@ impl Config { protos: &[impl AsRef], includes: &[impl AsRef], ) -> Result<()> { - let mut target_is_env = false; - let target: PathBuf = self.out_dir.clone().map(Ok).unwrap_or_else(|| { - env::var_os("OUT_DIR") - .ok_or_else(|| { - Error::new(ErrorKind::Other, "OUT_DIR environment variable is not set") - }) - .map(|val| { - target_is_env = true; - Into::into(val) - }) - })?; - // TODO: This should probably emit 'rerun-if-changed=PATH' directives for cargo, however // according to [1] if any are output then those paths replace the default crate root, // which is undesirable. Figure out how to do it in an additive way; perhaps gcc-rs has @@ -982,67 +1162,7 @@ impl Config { ) })?; - let requests = file_descriptor_set - .file - .into_iter() - .map(|descriptor| { - ( - Module::from_protobuf_package_name(descriptor.package()), - descriptor, - ) - }) - .collect::>(); - - let file_names = requests - .iter() - .map(|req| { - ( - req.0.clone(), - req.0.to_file_name_or(&self.default_package_filename), - ) - }) - .collect::>(); - - let modules = self.generate(requests)?; - for (module, content) in &modules { - let file_name = file_names - .get(module) - .expect("every module should have a filename"); - let output_path = target.join(file_name); - - let previous_content = fs::read(&output_path); - - if previous_content - .map(|previous_content| previous_content == content.as_bytes()) - .unwrap_or(false) - { - trace!("unchanged: {:?}", file_name); - } else { - trace!("writing: {:?}", file_name); - - let mut file = std::fs::File::create(output_path)?; - - for i in &self.start_file_with { - writeln!(file, "{}", i)?; - } - - writeln!(file, "{}", content)?; - } - } - - if let Some(ref include_file) = self.include_file { - trace!("Writing include file: {:?}", target.join(include_file)); - let mut file = fs::File::create(target.join(include_file))?; - self.write_includes( - modules.keys().collect(), - &mut file, - 0, - if target_is_env { None } else { Some(&target) }, - )?; - file.flush()?; - } - - Ok(()) + self.compile_fds(file_descriptor_set) } fn write_includes( @@ -1178,7 +1298,10 @@ impl default::Default for Config { map_type: PathMap::default(), bytes_type: PathMap::default(), type_attributes: PathMap::default(), + message_attributes: PathMap::default(), + enum_attributes: PathMap::default(), field_attributes: PathMap::default(), + boxed: PathMap::default(), prost_types: true, strip_enum_prefix: true, out_dir: None, @@ -1349,6 +1472,32 @@ pub fn compile_protos(protos: &[impl AsRef], includes: &[impl AsRef] Config::new().compile_protos(protos, includes) } +/// Compile a [`FileDescriptorSet`] into Rust files during a Cargo build. +/// +/// The generated `.rs` files are written to the Cargo `OUT_DIR` directory, suitable for use with +/// the [include!][1] macro. See the [Cargo `build.rs` code generation][2] example for more info. +/// +/// This function should be called in a project's `build.rs`. +/// +/// This function can be combined with a crate like [`protox`] which outputs a +/// [`FileDescriptorSet`] and is a pure Rust implementation of `protoc`. 
+/// +/// [`protox`]: https://github.com/andrewhickman/protox +/// +/// # Example +/// ```rust,no_run +/// # use prost_types::FileDescriptorSet; +/// # fn fds() -> FileDescriptorSet { todo!() } +/// fn main() -> std::io::Result<()> { +/// let file_descriptor_set = fds(); +/// +/// prost_build::compile_fds(file_descriptor_set) +/// } +/// ``` +pub fn compile_fds(fds: FileDescriptorSet) -> Result<()> { + Config::new().compile_fds(fds) +} + /// Returns the path to the `protoc` binary. pub fn protoc_from_env() -> PathBuf { let os_specific_hint = if cfg!(target_os = "macos") { @@ -1507,6 +1656,42 @@ mod tests { assert_eq!(state.finalized, 3); } + #[test] + fn test_generate_message_attributes() { + let _ = env_logger::try_init(); + + let out_dir = std::env::temp_dir(); + + Config::new() + .out_dir(out_dir.clone()) + .message_attribute(".", "#[derive(derive_builder::Builder)]") + .enum_attribute(".", "#[some_enum_attr(u8)]") + .compile_protos( + &["src/fixtures/helloworld/hello.proto"], + &["src/fixtures/helloworld"], + ) + .unwrap(); + + let out_file = out_dir + .join("helloworld.rs") + .as_path() + .display() + .to_string(); + #[cfg(feature = "format")] + let expected_content = + read_all_content("src/fixtures/helloworld/_expected_helloworld_formatted.rs") + .replace("\r\n", "\n"); + #[cfg(not(feature = "format"))] + let expected_content = read_all_content("src/fixtures/helloworld/_expected_helloworld.rs") + .replace("\r\n", "\n"); + let content = read_all_content(&out_file).replace("\r\n", "\n"); + assert_eq!( + expected_content, content, + "Unexpected content: \n{}", + content + ); + } + #[test] fn test_generate_no_empty_outputs() { let _ = env_logger::try_init(); @@ -1556,6 +1741,47 @@ mod tests { } } + #[test] + fn test_generate_field_attributes() { + let _ = env_logger::try_init(); + + let out_dir = std::env::temp_dir(); + + Config::new() + .out_dir(out_dir.clone()) + .boxed("Container.data.foo") + .boxed("Bar.qux") + .compile_protos( + &["src/fixtures/field_attributes/field_attributes.proto"], + &["src/fixtures/field_attributes"], + ) + .unwrap(); + + let out_file = out_dir + .join("field_attributes.rs") + .as_path() + .display() + .to_string(); + + let content = read_all_content(&out_file).replace("\r\n", "\n"); + + #[cfg(feature = "format")] + let expected_content = read_all_content( + "src/fixtures/field_attributes/_expected_field_attributes_formatted.rs", + ) + .replace("\r\n", "\n"); + #[cfg(not(feature = "format"))] + let expected_content = + read_all_content("src/fixtures/field_attributes/_expected_field_attributes.rs") + .replace("\r\n", "\n"); + + assert_eq!( + expected_content, content, + "Unexpected content: \n{}", + content + ); + } + #[test] fn deterministic_include_file() { let _ = env_logger::try_init(); diff --git a/prost-derive/Cargo.toml b/prost-derive/Cargo.toml index c5ff6f41f..001f46d1b 100644 --- a/prost-derive/Cargo.toml +++ b/prost-derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "prost-derive" -version = "0.11.2" +version = "0.11.8" authors = [ "Dan Burkert ", "Lucio Franco ", @@ -12,7 +12,7 @@ documentation = "https://docs.rs/prost-derive" readme = "README.md" description = "A Protocol Buffers implementation for the Rust Language." 
edition = "2021" -rust-version = "1.56" +rust-version = "1.60" [lib] proc_macro = true diff --git a/prost-derive/src/lib.rs b/prost-derive/src/lib.rs index d44abb075..ffea5a09c 100644 --- a/prost-derive/src/lib.rs +++ b/prost-derive/src/lib.rs @@ -1,4 +1,4 @@ -#![doc(html_root_url = "https://docs.rs/prost-derive/0.10.2")] +#![doc(html_root_url = "https://docs.rs/prost-derive/0.11.8")] // The `quote!` macro requires deep recursion. #![recursion_limit = "4096"] @@ -345,7 +345,7 @@ fn try_enumeration(input: TokenStream) -> Result { match discriminant { Some((_, expr)) => variants.push((ident, expr)), - None => bail!("Enumeration variants must have a disriminant"), + None => bail!("Enumeration variants must have a discriminant"), } } diff --git a/prost-types/Cargo.toml b/prost-types/Cargo.toml index 8893b00eb..273ec2440 100644 --- a/prost-types/Cargo.toml +++ b/prost-types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "prost-types" -version = "0.11.2" +version = "0.11.8" authors = [ "Dan Burkert ", "Lucio Franco for Timestamp { #[cfg(test)] mod tests { - - use std::convert::TryFrom; - - use proptest::prelude::*; - use super::*; + use proptest::prelude::*; #[test] fn test_min_max() { @@ -611,6 +607,7 @@ mod tests { ); } + #[cfg(feature = "std")] #[test] fn test_datetime_from_timestamp() { let case = |expected: &str, secs: i64, nanos: i32| { @@ -850,6 +847,7 @@ mod tests { ) } + #[cfg(feature = "std")] #[test] fn check_duration_parse_to_string_roundtrip( duration in core::time::Duration::arbitrary(), diff --git a/prost-types/src/lib.rs b/prost-types/src/lib.rs index 1d4a24c4f..7d60f885c 100644 --- a/prost-types/src/lib.rs +++ b/prost-types/src/lib.rs @@ -1,4 +1,4 @@ -#![doc(html_root_url = "https://docs.rs/prost-types/0.11.2")] +#![doc(html_root_url = "https://docs.rs/prost-types/0.11.8")] //! Protocol Buffers well-known types. //! @@ -11,6 +11,12 @@ #![cfg_attr(not(feature = "std"), no_std)] +#[rustfmt::skip] +pub mod compiler; +mod datetime; +#[rustfmt::skip] +mod protobuf; + use core::convert::TryFrom; use core::fmt; use core::i32; @@ -18,12 +24,7 @@ use core::i64; use core::str::FromStr; use core::time; -include!("protobuf.rs"); -pub mod compiler { - include!("compiler.rs"); -} - -mod datetime; +pub use protobuf::*; // The Protobuf `Duration` and `Timestamp` types can't delegate to the standard library equivalents // because the Protobuf versions are signed. To make them easier to work with, `From` conversions @@ -104,7 +105,7 @@ impl TryFrom for time::Duration { /// Converts a `Duration` to a `std::time::Duration`, failing if the duration is negative. fn try_from(mut duration: Duration) -> Result { duration.normalize(); - if duration.seconds >= 0 { + if duration.seconds >= 0 && duration.nanos >= 0 { Ok(time::Duration::new( duration.seconds as u64, duration.nanos as u32, @@ -233,6 +234,25 @@ impl Timestamp { // "invalid timestamp: {:?}", self); } + /// Normalizes the timestamp to a canonical format, returning the original value if it cannot be + /// normalized. + /// + /// Normalization is based on [`google::protobuf::util::CreateNormalized`][1]. + /// + /// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L59-L77 + pub fn try_normalize(mut self) -> Result { + let before = self.clone(); + self.normalize(); + // If the seconds value has changed, and is either i64::MIN or i64::MAX, then the timestamp + // normalization overflowed. 
+ if (self.seconds == i64::MAX || self.seconds == i64::MIN) && self.seconds != before.seconds + { + Err(before) + } else { + Ok(self) + } + } + /// Creates a new `Timestamp` at the start of the provided UTC date. pub fn date(year: i64, month: u8, day: u8) -> Result { Timestamp::date_time_nanos(year, month, day, 0, 0, 0, 0) @@ -403,11 +423,12 @@ impl fmt::Display for Timestamp { #[cfg(test)] mod tests { - use std::time::{self, SystemTime, UNIX_EPOCH}; + use super::*; + #[cfg(feature = "std")] use proptest::prelude::*; - - use super::*; + #[cfg(feature = "std")] + use std::time::{self, SystemTime, UNIX_EPOCH}; #[cfg(feature = "std")] proptest! { @@ -432,8 +453,37 @@ mod tests { #[test] fn check_duration_roundtrip( - std_duration in time::Duration::arbitrary(), + seconds in u64::arbitrary(), + nanos in 0u32..1_000_000_000u32, + ) { + let std_duration = time::Duration::new(seconds, nanos); + let prost_duration = match Duration::try_from(std_duration) { + Ok(duration) => duration, + Err(_) => return Err(TestCaseError::reject("duration out of range")), + }; + prop_assert_eq!(time::Duration::try_from(prost_duration.clone()).unwrap(), std_duration); + + if std_duration != time::Duration::default() { + let neg_prost_duration = Duration { + seconds: -prost_duration.seconds, + nanos: -prost_duration.nanos, + }; + + prop_assert!( + matches!( + time::Duration::try_from(neg_prost_duration), + Err(DurationError::NegativeDuration(d)) if d == std_duration, + ) + ) + } + } + + #[test] + fn check_duration_roundtrip_nanos( + nanos in u32::arbitrary(), ) { + let seconds = 0; + let std_duration = std::time::Duration::new(seconds, nanos); let prost_duration = match Duration::try_from(std_duration) { Ok(duration) => duration, Err(_) => return Err(TestCaseError::reject("duration out of range")), @@ -456,6 +506,24 @@ mod tests { } } + #[cfg(feature = "std")] + #[test] + fn check_duration_try_from_negative_nanos() { + let seconds: u64 = 0; + let nanos: u32 = 1; + let std_duration = std::time::Duration::new(seconds, nanos); + + let neg_prost_duration = Duration { + seconds: 0, + nanos: -1, + }; + + assert!(matches!( + time::Duration::try_from(neg_prost_duration), + Err(DurationError::NegativeDuration(d)) if d == std_duration, + )) + } + #[cfg(feature = "std")] #[test] fn check_timestamp_negative_seconds() { diff --git a/protobuf/Cargo.toml b/protobuf/Cargo.toml index eabd0430e..d3131ce68 100644 --- a/protobuf/Cargo.toml +++ b/protobuf/Cargo.toml @@ -9,7 +9,6 @@ publish = false edition = "2018" [dependencies] -bytes = { version = "1", default-features = false } prost = { path = ".." 
} prost-types = { path = "../prost-types" } @@ -29,7 +28,7 @@ tempfile = "3" libz-sys = { version = "1.1, < 1.1.7", optional = true } [dev-dependencies] -criterion = "0.3" +criterion = { version = "0.4", default-features = false } cfg-if = "1" [lib] @@ -39,3 +38,6 @@ bench = false [[bench]] name = "dataset" harness = false + +[package.metadata.cargo-machete] +ignored = ["prost-types"] diff --git a/src/lib.rs b/src/lib.rs index 628c34a0a..fa6079e33 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,4 @@ -#![doc(html_root_url = "https://docs.rs/prost/0.11.3")] +#![doc(html_root_url = "https://docs.rs/prost/0.11.8")] #![cfg_attr(not(feature = "std"), no_std)] #![doc = include_str!("../README.md")] diff --git a/tests/single-include/Cargo.toml b/tests/single-include/Cargo.toml index 1d8eebebd..d95c9852c 100644 --- a/tests/single-include/Cargo.toml +++ b/tests/single-include/Cargo.toml @@ -7,7 +7,7 @@ publish = false license = "MIT" [dependencies] -prost = { path = "../../../prost" } +prost = { path = "../.." } [build-dependencies] prost-build = { path = "../../prost-build" }
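The new `Config::message_attribute`, `Config::enum_attribute`, and `Config::boxed` options introduced in this diff compose in a single `build.rs`. The sketch below is illustrative only: the proto file `proto/items.proto` and the paths `.items.Node.kind` / `.items.Node.payload` are hypothetical placeholders, and the derives mirror the examples in the new doc comments rather than a required configuration.

```rust
// build.rs — illustrative sketch of the Config options added in this change.
// The .proto file and the ".items.Node.*" paths are hypothetical.
fn main() -> std::io::Result<()> {
    prost_build::Config::new()
        // Emitted before every generated message struct ("." matches all paths).
        .message_attribute(".", "#[derive(Eq)]")
        // Emitted before the Rust enum generated for a (hypothetical) oneof field;
        // per the doc comment above, the oneof's field name doubles as its type path.
        .enum_attribute(".items.Node.kind", "#[derive(Eq)]")
        // Wrap a (hypothetical) field in a Box instead of storing it inline.
        .boxed(".items.Node.payload")
        .compile_protos(&["proto/items.proto"], &["proto"])
}
```

As with the existing `type_attribute`, calls to these methods are cumulative, and the same configuration can be driven from a prebuilt `FileDescriptorSet` via the new `compile_fds` entry points shown in the doc examples above.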
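On the prost-types side, the behavioural changes above (the new `Timestamp::try_normalize` and the stricter `TryFrom<Duration> for std::time::Duration`) can be exercised directly. A minimal sketch, assuming the 0.11.8 crates from this diff; the concrete values are chosen only to trigger each branch, and the overflow case assumes `normalize` saturates at `i64::MAX` as in the current prost-types implementation.

```rust
use prost_types::{Duration, Timestamp};

fn main() {
    // Nanos that spill over into the seconds field normalize cleanly.
    let ts = Timestamp { seconds: 1, nanos: 1_500_000_000 };
    assert_eq!(
        ts.try_normalize(),
        Ok(Timestamp { seconds: 2, nanos: 500_000_000 })
    );

    // A timestamp whose seconds would overflow during normalization is
    // handed back unchanged as an Err instead of silently saturating.
    let ts = Timestamp { seconds: i64::MAX - 1, nanos: 2_000_000_000 };
    assert!(ts.try_normalize().is_err());

    // With the fixed conversion, a Duration holding negative nanos is
    // rejected rather than being treated as a positive std Duration.
    let neg = Duration { seconds: 0, nanos: -1 };
    assert!(std::time::Duration::try_from(neg).is_err());
}
```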