diff --git a/CHANGELOG.md b/CHANGELOG.md index 1685ec811e..b70135ee71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,8 +6,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ## [Unreleased] -### Changed - #### Breaking - [#554](https://github.com/FuelLabs/fuel-vm/pull/554): Removed `debug` feature from the `fuel-vm`. The debugger is always available and becomes active after calling any `set_*` method. @@ -15,6 +13,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/). These opcodes charged inadequately low costs in comparison to the amount of work. This change should make all transactions that used these opcodes much more expensive than before. +- [#533](https://github.com/FuelLabs/fuel-vm/pull/533): Use custom serialization for fuel-types to allow no_std compilation. + ## [Version 0.36.1] ### Changed diff --git a/Cargo.toml b/Cargo.toml index e96ae160cf..f55bfb24c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,6 +23,7 @@ version = "0.36.1" [workspace.dependencies] fuel-asm = { version = "0.36.1", path = "fuel-asm", default-features = false } fuel-crypto = { version = "0.36.1", path = "fuel-crypto", default-features = false } +fuel-derive = { version = "0.36.1", path = "fuel-derive", default-features = false } fuel-merkle = { version = "0.36.1", path = "fuel-merkle", default-features = false } fuel-storage = { version = "0.36.1", path = "fuel-storage", default-features = false } fuel-tx = { version = "0.36.1", path = "fuel-tx", default-features = false } diff --git a/fuel-asm/Cargo.toml b/fuel-asm/Cargo.toml index 0165381f35..4f41c4021e 100644 --- a/fuel-asm/Cargo.toml +++ b/fuel-asm/Cargo.toml @@ -13,6 +13,7 @@ description = "Atomic types of the FuelVM." [dependencies] arbitrary = { version = "1.1", features = ["derive"], optional = true } bitflags = "1.3" +fuel-types = { workspace = true } serde = { version = "1.0", default-features = false, features = ["derive"], optional = true } strum = { version = "0.24", default-features = false, features = ["derive"] } wasm-bindgen = { version = "0.2.87", optional = true } @@ -26,7 +27,7 @@ rstest = "0.16" [features] default = ["std"] typescript = ["wasm-bindgen", "wee_alloc"] -std = ["serde?/default"] +std = ["serde?/default", "fuel-types/std"] serde = ["dep:serde"] # docs.rs-specific configuration diff --git a/fuel-asm/src/panic_instruction.rs b/fuel-asm/src/panic_instruction.rs index eb157afc0f..a912599b36 100644 --- a/fuel-asm/src/panic_instruction.rs +++ b/fuel-asm/src/panic_instruction.rs @@ -10,6 +10,7 @@ use crate::{ #[derive(Clone, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "typescript", wasm_bindgen::prelude::wasm_bindgen)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(fuel_types::canonical::Deserialize, fuel_types::canonical::Serialize)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] /// Describe a panic reason with the instruction that generated it pub struct PanicInstruction { diff --git a/fuel-asm/src/panic_reason.rs b/fuel-asm/src/panic_reason.rs index 19f2cefe65..db00f9278f 100644 --- a/fuel-asm/src/panic_reason.rs +++ b/fuel-asm/src/panic_reason.rs @@ -26,6 +26,7 @@ enum_from! 
{ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::EnumIter)] #[cfg_attr(feature = "typescript", wasm_bindgen::prelude::wasm_bindgen)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] + #[derive(fuel_types::canonical::Serialize, fuel_types::canonical::Deserialize)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[repr(u8)] #[non_exhaustive] diff --git a/fuel-crypto/Cargo.toml b/fuel-crypto/Cargo.toml index 1ad6ddcea2..1b77bb3303 100644 --- a/fuel-crypto/Cargo.toml +++ b/fuel-crypto/Cargo.toml @@ -34,7 +34,7 @@ sha2 = "0.10" [features] default = ["fuel-types/default", "std"] -alloc = ["rand?/alloc", "secp256k1/alloc"] +alloc = ["rand?/alloc", "secp256k1/alloc", "fuel-types/alloc"] random = ["fuel-types/random", "rand"] serde = ["dep:serde", "fuel-types/serde"] # `rand-std` is used to further protect the blinders from side-channel attacks and won't compromise diff --git a/fuel-derive/Cargo.toml b/fuel-derive/Cargo.toml new file mode 100644 index 0000000000..6bddd0d857 --- /dev/null +++ b/fuel-derive/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "fuel-derive" +version = { workspace = true } +authors = { workspace = true } +categories = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +keywords = ["blockchain", "cryptocurrencies", "fuel-vm", "vm"] +license = { workspace = true } +repository = { workspace = true } +description = "FuelVM (de)serialization derive macros for `fuel-vm` data structures." + +[lib] +proc-macro = true + +[dependencies] +quote = "1" +syn = { version = "2", features = ["full"] } +proc-macro2 = "1" +synstructure = "0.13" diff --git a/fuel-derive/README.md b/fuel-derive/README.md new file mode 100644 index 0000000000..d28edae921 --- /dev/null +++ b/fuel-derive/README.md @@ -0,0 +1,8 @@ +# Fuel VM custom serialization derive macros + +[![build](https://github.com/FuelLabs/fuel-vm/actions/workflows/ci.yml/badge.svg)](https://github.com/FuelLabs/fuel-vm/actions/workflows/ci.yml) +[![crates.io](https://img.shields.io/crates/v/fuel-derive?label=latest)](https://crates.io/crates/fuel-derive) +[![docs](https://docs.rs/fuel-derive/badge.svg)](https://docs.rs/fuel-derive/) +[![discord](https://img.shields.io/badge/chat%20on-discord-orange?&logo=discord&logoColor=ffffff&color=7389D8&labelColor=6A7EC2)](https://discord.gg/xfpK4Pe) + +This crate contains derive macros for canonical serialization and deserialization. It is used together with the [`fuel-types/src/canonical.rs`](fuel-types/src/canonical.rs) module, which contains the associated traits and their implementations for native Rust types.
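To make the new derive-based workflow concrete, here is a minimal usage sketch (not part of the diff). The type `ExampleReceipt` and its fields are hypothetical; the derive paths, the `#[canonical(skip)]` attribute, and the `encode`/`decode` calls mirror the usages this PR adds in `fuel-asm/src/panic_instruction.rs`, `fuel-tx/src/receipt.rs`, and `fuel-tx/src/tests/bytes.rs`.

```rust
use fuel_types::canonical::{Deserialize, Serialize};

// Hypothetical type for illustration only; it mirrors how `Receipt` derives the
// canonical traits and skips its cached `data` field in this PR.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ExampleReceipt {
    id: u64,
    val: u64,
    // Skipped fields are not part of the canonical encoding; on decode they
    // are filled in with `Default::default()`.
    #[canonical(skip)]
    data: Option<Vec<u8>>,
}

fn roundtrip(receipt: &ExampleReceipt) -> ExampleReceipt {
    // `encode`/`decode` come from the canonical traits in
    // `fuel-types/src/canonical.rs`, as exercised by the updated
    // `assert_encoding_correct` helper in `fuel-tx/src/tests/bytes.rs`.
    let mut bytes = Vec::new();
    receipt.clone().encode(&mut bytes).expect("encoding failed");
    ExampleReceipt::decode(&mut &bytes[..]).expect("decoding failed")
}
```

The skip-then-`Default` behavior shown above is what the derive machinery in `fuel-derive/src/deserialize.rs` and `fuel-derive/src/serialize.rs` below generates for `#[canonical(skip)]` fields.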
diff --git a/fuel-derive/src/attribute.rs b/fuel-derive/src/attribute.rs new file mode 100644 index 0000000000..b3ca9f265c --- /dev/null +++ b/fuel-derive/src/attribute.rs @@ -0,0 +1,172 @@ +// TODO: nice error messages instead of panics, see: https://stackoverflow.com/a/54394014/2867076 + +use std::collections::HashMap; + +use proc_macro2::{ + TokenStream, + TokenTree, +}; +use syn::{ + AttrStyle, + Attribute, + Meta, +}; +use synstructure::BindingInfo; + +fn parse_attrs(s: &synstructure::Structure) -> HashMap<String, TokenStream> { + let mut attrs = HashMap::new(); + + for attr in &s.ast().attrs { + if attr.style != AttrStyle::Outer { + continue + } + if let Meta::List(ml) = &attr.meta { + if ml.path.segments.len() == 1 && ml.path.segments[0].ident == "canonical" { + let mut tt = ml.tokens.clone().into_iter().peekable(); + if let Some(key) = tt.next() { + let key = key.to_string(); + if let Some(eq_sign) = tt.peek() { + if eq_sign.to_string() == "=" { + let _ = tt.next(); + } + } else { + // Single token, no `=`, so it's a boolean flag. + let old = attrs.insert(key.clone(), TokenStream::new()); + if old.is_some() { + panic!("duplicate canonical attribute: {}", key); + } + continue + } + + // Key-value pair + let value = TokenStream::from_iter(tt); + let old = attrs.insert(key.clone(), value); + if old.is_some() { + panic!("duplicate canonical attribute: {}", key); + } + continue + } + panic!("enum-level canonical attribute must be a `key = value` pair"); + } + } + } + + attrs +} + +/// Top-level `canonical` attributes for a struct +pub struct StructAttrs { + /// The struct is prefixed with the given word. + /// Useful with `#[canonical(inner_discriminant)]`. + /// The type must implement `Into<u64>`, which is used to serialize it. + pub prefix: Option<TokenStream>, +} + +impl StructAttrs { + pub fn parse(s: &synstructure::Structure) -> Self { + let mut attrs = parse_attrs(s); + + let prefix = attrs.remove("prefix"); + + if !attrs.is_empty() { + panic!("unknown canonical attributes: {:?}", attrs.keys()) + } + + Self { prefix } + } +} + +/// Top-level `canonical` attributes for an enum +#[allow(non_snake_case)] +pub struct EnumAttrs { + /// This is a wrapper enum where every variant can be recognized from its first + /// word. This means that the enum itself doesn't have to serialize the + /// discriminant, but the field itself does so. This can be done using the + /// `#[canonical(prefix = ...)]` attribute. `TryFromPrimitive` traits are used to + /// convert the raw bytes into the given type. + pub inner_discriminant: Option<TokenStream>, + /// Replaces calculation of the serialized static size with a custom function. + pub serialized_size_static_with: Option<TokenStream>, + /// Replaces calculation of the serialized dynamic size with a custom function. + pub serialized_size_dynamic_with: Option<TokenStream>, + /// Replaces serialization with a custom function. + pub serialize_with: Option<TokenStream>, + /// Replaces deserialization with a custom function. + pub deserialize_with: Option<TokenStream>, + /// Determines whether the enum has a dynamic size when `serialize_with` is used.
+ pub SIZE_NO_DYNAMIC: Option, +} + +impl EnumAttrs { + #[allow(non_snake_case)] + pub fn parse(s: &synstructure::Structure) -> Self { + let mut attrs = parse_attrs(s); + + let inner_discriminant = attrs.remove("inner_discriminant"); + let serialized_size_static_with = attrs.remove("serialized_size_static_with"); + let serialized_size_dynamic_with = attrs.remove("serialized_size_dynamic_with"); + let serialize_with = attrs.remove("serialize_with"); + let deserialize_with = attrs.remove("deserialize_with"); + let SIZE_NO_DYNAMIC = attrs.remove("SIZE_NO_DYNAMIC"); + + if !attrs.is_empty() { + panic!("unknown canonical attributes: {:?}", attrs.keys()) + } + + Self { + inner_discriminant, + serialized_size_static_with, + serialized_size_dynamic_with, + serialize_with, + deserialize_with, + SIZE_NO_DYNAMIC, + } + } +} + +/// Parse `#[repr(int)]` attribute for an enum. +pub fn parse_enum_repr(attrs: &[Attribute]) -> Option { + for attr in attrs { + if attr.style != AttrStyle::Outer { + continue + } + if let Meta::List(ml) = &attr.meta { + if ml.path.segments.len() == 1 && ml.path.segments[0].ident == "repr" { + if let Some(TokenTree::Ident(ident)) = + ml.tokens.clone().into_iter().next() + { + return Some(ident.to_string()) + } + } + } + } + None +} + +/// Parse `#[canonical(skip)]` attribute for a binding field. +pub fn should_skip_field_binding(binding: &BindingInfo<'_>) -> bool { + should_skip_field(&binding.ast().attrs) +} + +/// Parse `#[canonical(skip)]` attribute for a struct field. +pub fn should_skip_field(attrs: &[Attribute]) -> bool { + for attr in attrs { + if attr.style != AttrStyle::Outer { + continue + } + if let Meta::List(ml) = &attr.meta { + if ml.path.segments.len() == 1 && ml.path.segments[0].ident == "canonical" { + for token in ml.tokens.clone() { + if let TokenTree::Ident(ident) = &token { + if ident == "skip" { + return true + } else { + panic!("unknown canonical attribute: {}", ident) + } + } + } + } + } + } + false +} diff --git a/fuel-derive/src/deserialize.rs b/fuel-derive/src/deserialize.rs new file mode 100644 index 0000000000..d598c46764 --- /dev/null +++ b/fuel-derive/src/deserialize.rs @@ -0,0 +1,210 @@ +use proc_macro2::TokenStream as TokenStream2; +use quote::quote; + +use crate::{ + attribute::{ + should_skip_field, + should_skip_field_binding, + EnumAttrs, + StructAttrs, + }, + evaluate::evaluate_simple_expr, +}; + +fn deserialize_struct(s: &mut synstructure::Structure) -> TokenStream2 { + assert_eq!(s.variants().len(), 1, "structs must have one variant"); + + let variant: &synstructure::VariantInfo = &s.variants()[0]; + let decode_main = variant.construct(|field, _| { + let ty = &field.ty; + if should_skip_field(&field.attrs) { + quote! { + ::core::default::Default::default() + } + } else { + quote! {{ + <#ty as ::fuel_types::canonical::Deserialize>::decode_static(buffer)? + }} + } + }); + + let decode_dynamic = variant.each(|binding| { + if should_skip_field_binding(binding) { + quote! { + *#binding = ::core::default::Default::default(); + } + } else { + quote! {{ + ::fuel_types::canonical::Deserialize::decode_dynamic(#binding, buffer)?; + }} + } + }); + + let remove_prefix = if let Some(expected_prefix) = StructAttrs::parse(s).prefix { + quote! {{ + let prefix = ::decode_static(buffer)?; + if prefix.try_into() != Ok(#expected_prefix) { + return ::core::result::Result::Err(::fuel_types::canonical::Error::InvalidPrefix) + } + }} + } else { + quote! {} + }; + + s.gen_impl(quote! 
{ + gen impl ::fuel_types::canonical::Deserialize for @Self { + fn decode_static(buffer: &mut I) -> ::core::result::Result { + #remove_prefix + ::core::result::Result::Ok(#decode_main) + } + + fn decode_dynamic(&mut self, buffer: &mut I) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + match self { + #decode_dynamic, + }; + ::core::result::Result::Ok(()) + } + } + }) +} + +fn deserialize_enum(s: &synstructure::Structure) -> TokenStream2 { + let attrs = EnumAttrs::parse(s); + let _name = &s.ast().ident; + + assert!(!s.variants().is_empty(), "got invalid empty enum"); + + let mut next_discriminant = 0u64; + let decode_static: TokenStream2 = s + .variants() + .iter() + .map(|variant| { + let decode_main = variant.construct(|field, _| { + if should_skip_field(&field.attrs) { + quote! { + ::core::default::Default::default() + } + } else { + let ty = &field.ty; + quote! {{ + <#ty as ::fuel_types::canonical::Deserialize>::decode_static(buffer)? + }} + } + }); + + + let discr = if let Some(discr_type) = attrs.inner_discriminant.as_ref() { + let vname = variant.ast().ident; + quote! { #discr_type::#vname } + } else { + if let Some((_, d)) = variant.ast().discriminant { + next_discriminant = evaluate_simple_expr(d).expect("Unable to evaluate discriminant expression"); + }; + let v = next_discriminant; + next_discriminant = next_discriminant.checked_add(1).expect("Discriminant overflow"); + quote! { #v } + }; + + quote! { + #discr => { + ::core::result::Result::Ok(#decode_main) + } + } + }).collect(); + + let decode_dynamic = s.variants().iter().map(|variant| { + let decode_dynamic = variant.each(|binding| { + if should_skip_field_binding(binding) { + quote! { + *#binding = ::core::default::Default::default(); + } + } else { + quote! {{ + ::fuel_types::canonical::Deserialize::decode_dynamic(#binding, buffer)?; + }} + } + }); + + quote! { + #decode_dynamic + } + }); + + // Handle #[canonical(inner_discriminant = Type)] + let decode_discriminant = if attrs.inner_discriminant.is_some() { + quote! { + let buf = buffer.clone(); + let raw_discr = <::core::primitive::u64 as ::fuel_types::canonical::Deserialize>::decode(buffer)?; + *buffer = buf; // Restore buffer position + } + } else { + quote! { + let raw_discr = <::core::primitive::u64 as ::fuel_types::canonical::Deserialize>::decode(buffer)?; + } + }; + + // Handle #[canonical(inner_discriminant = Type)] + let mapped_discr = if let Some(discr_type) = attrs.inner_discriminant { + quote! { { + use ::num_enum::{TryFromPrimitive, IntoPrimitive}; + let Ok(discr) = #discr_type::try_from_primitive(raw_discr) else { + return ::core::result::Result::Err(::fuel_types::canonical::Error::UnknownDiscriminant) + }; + discr + } } + } else { + quote! { raw_discr } + }; + + // Handle #[canonical(deserialize_with = function)] + if let Some(helper) = attrs.deserialize_with { + return s.gen_impl(quote! { + gen impl ::fuel_types::canonical::Deserialize for @Self { + fn decode_static(buffer: &mut I) -> ::core::result::Result { + let raw_discr = <::core::primitive::u64 as ::fuel_types::canonical::Deserialize>::decode(buffer)?; + #helper(#mapped_discr, buffer) + } + + fn decode_dynamic(&mut self, buffer: &mut I) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + ::core::result::Result::Ok(()) + } + } + }) + } + + s.gen_impl(quote! 
{ + gen impl ::fuel_types::canonical::Deserialize for @Self { + fn decode_static(buffer: &mut I) -> ::core::result::Result { + #decode_discriminant + match #mapped_discr { + #decode_static + _ => ::core::result::Result::Err(::fuel_types::canonical::Error::UnknownDiscriminant), + } + } + + fn decode_dynamic(&mut self, buffer: &mut I) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + match self { + #( + #decode_dynamic + )* + _ => return ::core::result::Result::Err(::fuel_types::canonical::Error::UnknownDiscriminant), + }; + + ::core::result::Result::Ok(()) + } + } + }) +} + +/// Derives `Deserialize` trait for the given `struct` or `enum`. +pub fn deserialize_derive(mut s: synstructure::Structure) -> TokenStream2 { + s.bind_with(|_| synstructure::BindStyle::RefMut) + .add_bounds(synstructure::AddBounds::Fields) + .underscore_const(true); + + match s.ast().data { + syn::Data::Struct(_) => deserialize_struct(&mut s), + syn::Data::Enum(_) => deserialize_enum(&s), + _ => panic!("Can't derive `Deserialize` for `union`s"), + } +} diff --git a/fuel-derive/src/evaluate.rs b/fuel-derive/src/evaluate.rs new file mode 100644 index 0000000000..fd0176689e --- /dev/null +++ b/fuel-derive/src/evaluate.rs @@ -0,0 +1,14 @@ +use syn::Expr; + +/// Evaluate a simple u64-valued expression, like enum discriminant value. +pub fn evaluate_simple_expr(expr: &Expr) -> Option { + match expr { + Expr::Lit(lit) => match &lit.lit { + syn::Lit::Int(int) => int.base10_parse().ok(), + _ => None, + }, + Expr::Paren(paren) => evaluate_simple_expr(&paren.expr), + Expr::Group(group) => evaluate_simple_expr(&group.expr), + _ => None, + } +} diff --git a/fuel-derive/src/lib.rs b/fuel-derive/src/lib.rs new file mode 100644 index 0000000000..52de6fd834 --- /dev/null +++ b/fuel-derive/src/lib.rs @@ -0,0 +1,27 @@ +//! Derive macros for canonical type serialization and deserialization. + +#![deny(unused_must_use, missing_docs)] + +extern crate proc_macro; + +mod utils; + +mod attribute; +mod deserialize; +mod evaluate; +mod serialize; + +use self::{ + deserialize::deserialize_derive, + serialize::serialize_derive, +}; +synstructure::decl_derive!( + [Deserialize, attributes(canonical)] => + /// Derives `Deserialize` trait for the given `struct` or `enum`. + deserialize_derive +); +synstructure::decl_derive!( + [Serialize, attributes(canonical)] => + /// Derives `Serialize` trait for the given `struct` or `enum`. + serialize_derive +); diff --git a/fuel-derive/src/serialize.rs b/fuel-derive/src/serialize.rs new file mode 100644 index 0000000000..f3f7dae12c --- /dev/null +++ b/fuel-derive/src/serialize.rs @@ -0,0 +1,551 @@ +use proc_macro2::TokenStream as TokenStream2; +use quote::quote; + +use core::cmp::Ordering; + +use crate::{ + attribute::{ + parse_enum_repr, + should_skip_field, + should_skip_field_binding, + EnumAttrs, + StructAttrs, + }, + evaluate::evaluate_simple_expr, +}; + +fn serialize_struct(s: &synstructure::Structure) -> TokenStream2 { + let attrs = StructAttrs::parse(s); + + assert_eq!(s.variants().len(), 1, "structs must have one variant"); + + let variant: &synstructure::VariantInfo = &s.variants()[0]; + let encode_static = variant.each(|binding| { + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_static(#binding, buffer)?; + } + } + }); + + let encode_dynamic = variant.each(|binding| { + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! 
{ + ::fuel_types::canonical::Serialize::encode_dynamic(#binding, buffer)?; + } + } + }); + + let size_dynamic_code = variant.each(|binding| { + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + size = ::fuel_types::canonical::add_sizes(size, #binding.size_dynamic()); + } + } + }); + let size_dynamic_code = + quote! { let mut size = 0; match self { #size_dynamic_code}; size }; + + let prefix = if let Some(prefix_type) = attrs.prefix.as_ref() { + quote! { + let prefix: u64 = #prefix_type.into(); + ::encode(&prefix, buffer)?; + } + } else { + quote! {} + }; + + let (mut size_static, size_no_dynamic) = constsize_fields(variant.ast().fields); + if attrs.prefix.is_some() { + size_static = quote! { ::fuel_types::canonical::add_sizes(#size_static, 8) }; + }; + + s.gen_impl(quote! { + gen impl ::fuel_types::canonical::Serialize for @Self { + #[inline(always)] + fn encode_static(&self, buffer: &mut O) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + #prefix + match self { + #encode_static + }; + + ::core::result::Result::Ok(()) + } + + fn encode_dynamic(&self, buffer: &mut O) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + match self { + #encode_dynamic + }; + + ::core::result::Result::Ok(()) + } + + #[inline(always)] + fn size_dynamic(&self) -> usize { + #size_dynamic_code + } + + const SIZE_NO_DYNAMIC: bool = #size_no_dynamic; + } + + gen impl ::fuel_types::canonical::SerializedSizeFixed for @Self { + const SIZE_STATIC: usize = #size_static; + } + }) +} + +fn serialize_enum(s: &synstructure::Structure) -> TokenStream2 { + let repr = parse_enum_repr(&s.ast().attrs); + let attrs = EnumAttrs::parse(s); + + assert!(!s.variants().is_empty(), "got invalid empty enum"); + let mut next_discriminant = 0u64; + let encode_static = s.variants().iter().map(|v| { + let pat = v.pat(); + let encode_static_iter = v.bindings().iter().map(|binding| { + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_static(#binding, buffer)?; + } + } + }); + + // Handle #[canonical(inner_discriminant = Type)] + let discr = if let Some(discr_type) = attrs.inner_discriminant.as_ref() { + if v.ast().discriminant.is_some() { + panic!("User-specified discriminants are not supported with #[canonical(discriminant = Type)]") + } + quote! { { + #discr_type::from(self).into() + } } + } else { + if let Some((_, d)) = v.ast().discriminant { + next_discriminant = evaluate_simple_expr(d).expect("Unable to evaluate discriminant expression"); + }; + let v = next_discriminant; + next_discriminant = next_discriminant.checked_add(1).expect("Discriminant overflow"); + quote! { #v } + }; + + let encode_discriminant = if attrs.inner_discriminant.is_some() { + quote! {} + } else { + quote! { + <::core::primitive::u64 as ::fuel_types::canonical::Serialize>::encode(&#discr, buffer)?; + } + }; + + quote! { + #pat => { + #encode_discriminant + #( + { #encode_static_iter } + )* + } + } + }); + let encode_dynamic = s.variants().iter().map(|v| { + let encode_dynamic_iter = v.each(|binding| { + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_dynamic(#binding, buffer)?; + } + } + }); + quote! 
{ + #encode_dynamic_iter + } + }); + + // Handle #[canonical(serialize_with = function)] + let data_helper = attrs.serialize_with.as_ref(); + let size_helper_static = attrs.serialized_size_static_with.as_ref(); + let size_helper_dynamic = attrs.serialized_size_dynamic_with.as_ref(); + if let (Some(data_helper), Some(size_helper_static), Some(size_helper_dynamic)) = + (data_helper, size_helper_static, size_helper_dynamic) + { + let size_no_dynamic = attrs + .SIZE_NO_DYNAMIC + .expect("serialize_with requires SIZE_NO_DYNAMIC key"); + + return s.gen_impl(quote! { + gen impl ::fuel_types::canonical::Serialize for @Self { + const SIZE_NO_DYNAMIC: bool = #size_no_dynamic; + + #[inline(always)] + fn encode_static(&self, buffer: &mut O) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + #data_helper(self, buffer) + } + + fn encode_dynamic(&self, buffer: &mut O) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + ::core::result::Result::Ok(()) + } + + fn size_dynamic(&self) -> usize { + #size_helper_dynamic(self) + } + } + + gen impl ::fuel_types::canonical::SerializedSize for @Self { + #[inline(always)] + fn size_static(&self) -> usize { + #size_helper_static(self) + } + } + }) + } else if ![ + data_helper.is_none(), + size_helper_static.is_none(), + size_helper_dynamic.is_none(), + ] + .into_iter() + .all(|v| v) + { + panic!("serialize_with, serialized_size_static_with and serialized_size_dynamic_with must be used together"); + } + + let (variant_size_static, size_no_dynamic): (Vec, Vec) = + s.variants() + .iter() + .map(|v| constsize_fields(v.ast().fields)) + .unzip(); + + let size_no_dynamic = size_no_dynamic.iter().fold(quote! { true }, |acc, item| { + quote! { + #acc && #item + } + }); + + // #[repr(int)] types have a known static size + let impl_size = if let Some(repr) = repr { + // Repr size, already aligned up + let repr_size: usize = match repr.as_str() { + "u8" => 8, + "u16" => 8, + "u32" => 8, + "u64" => 8, + "u128" => 16, + _ => panic!("Unknown repr: {}", repr), + }; + s.gen_impl(quote! { + gen impl ::fuel_types::canonical::SerializedSizeFixed for @Self { + const SIZE_STATIC: usize = #repr_size; + } + }) + } else { + let match_size_static: TokenStream2 = s + .variants() + .iter() + .zip(variant_size_static) + .map(|(variant, static_size_code)| { + let p = variant.pat(); + quote! { #p => #static_size_code, } + }) + .collect(); + let match_size_static = quote! { match self { #match_size_static } }; + let discr_size = if attrs.inner_discriminant.is_some() { + quote! { 0usize } + } else { + quote! { 8usize } + }; + s.gen_impl(quote! { + gen impl ::fuel_types::canonical::SerializedSize for @Self { + fn size_static(&self) -> usize { + ::fuel_types::canonical::add_sizes(#discr_size, #match_size_static) + } + } + }) + }; + + let match_size_dynamic: TokenStream2 = s + .variants() + .iter() + .map(|variant| { + variant.each(|binding| { + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + size = ::fuel_types::canonical::add_sizes(size, #binding.size_dynamic()); + } + } + }) + }) + .collect(); + let match_size_dynamic = + quote! {{ let mut size = 0; match self { #match_size_dynamic } size }}; + + let impl_code = s.gen_impl(quote! 
{ + gen impl ::fuel_types::canonical::Serialize for @Self { + #[inline(always)] + fn encode_static(&self, buffer: &mut O) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + match self { + #( + #encode_static + )*, + _ => return ::core::result::Result::Err(::fuel_types::canonical::Error::UnknownDiscriminant), + }; + + ::core::result::Result::Ok(()) + } + + fn encode_dynamic(&self, buffer: &mut O) -> ::core::result::Result<(), ::fuel_types::canonical::Error> { + match self { + #( + #encode_dynamic + )*, + _ => return ::core::result::Result::Err(::fuel_types::canonical::Error::UnknownDiscriminant), + }; + + ::core::result::Result::Ok(()) + } + + #[inline(always)] + fn size_dynamic(&self) -> usize { + #match_size_dynamic + } + + const SIZE_NO_DYNAMIC: bool = #size_no_dynamic; + } + }); + + quote! { + #impl_code + #impl_size + } +} + +fn is_sized_primitive(type_name: &str) -> bool { + // This list contains types that are known to be primitive types, + // including common aliases of them. These types cannot be resolved + // further, and their size is known at compile time. + [ + "u8", + "u16", + "u32", + "u64", + "u128", + "usize", + "i8", + "i16", + "i32", + "i64", + "i128", + "isize", + "Word", + "RawInstruction", + ] + .contains(&type_name) +} + +#[derive(Debug)] +enum TypeSize { + /// The size of the type is known at parse time. + Constant(usize), + /// The size of the type is known at compile time, and + /// the following token stream computes to it. + Computed(TokenStream2), +} + +impl TypeSize { + pub fn from_expr(expr: &syn::Expr) -> Self { + if let syn::Expr::Lit(lit) = expr { + if let syn::Lit::Int(ref int) = lit.lit { + if let Ok(value) = int.base10_parse::() { + return Self::Constant(value) + } + } + } + + Self::Computed(quote! { + #expr + }) + } +} + +impl quote::ToTokens for TypeSize { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + Self::Constant(value) => { + tokens.extend(quote! { + #value + }); + } + Self::Computed(expr) => { + tokens.extend(expr.clone()); + } + } + } +} + +// Used to sort constant first so that they can be folded left-to-right +impl PartialEq for TypeSize { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Constant(a), Self::Constant(b)) => a == b, + _ => false, + } + } +} + +// Used to sort constant first so that they can be folded left-to-right +impl PartialOrd for TypeSize { + fn partial_cmp(&self, other: &Self) -> Option { + Some(match (self, other) { + (Self::Constant(a), Self::Constant(b)) => a.cmp(b), + (Self::Computed(_), Self::Computed(_)) => Ordering::Equal, + (Self::Constant(_), Self::Computed(_)) => Ordering::Less, + (Self::Computed(_), Self::Constant(_)) => Ordering::Greater, + }) + } +} + +impl core::ops::Add for TypeSize { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + match (self, rhs) { + (Self::Constant(a), Self::Constant(b)) => Self::Constant( + a.checked_add(b) + .expect("Size would overflow on compile time"), + ), + (Self::Computed(a), Self::Computed(b)) => Self::Computed(quote! { + ::fuel_types::canonical::add_sizes(#a, #b) + }), + (Self::Constant(a), Self::Computed(b)) => Self::Computed(quote! { + ::fuel_types::canonical::add_sizes(#a, #b) + }), + (Self::Computed(a), Self::Constant(b)) => Self::Computed(quote! 
{ + ::fuel_types::canonical::add_sizes(#a, #b) + }), + } + } +} + +impl core::ops::Mul for TypeSize { + type Output = Self; + + fn mul(self, rhs: Self) -> Self { + match (self, rhs) { + (Self::Constant(a), Self::Constant(b)) => Self::Constant(a * b), + (Self::Computed(a), Self::Computed(b)) => Self::Computed(quote! { + #a * #b + }), + (Self::Constant(a), Self::Computed(b)) => Self::Computed(quote! { + #a * #b + }), + (Self::Computed(a), Self::Constant(b)) => Self::Computed(quote! { + #a * #b + }), + } + } +} + +/// Determines serialized size of a type using `mem::size_of` if possible. +fn try_builtin_sized(ty: &syn::Type, align: bool) -> Option { + match ty { + syn::Type::Group(group) => try_builtin_sized(&group.elem, align), + syn::Type::Array(arr) => { + let elem_size = try_builtin_sized(arr.elem.as_ref(), false)?; + let elem_count = TypeSize::from_expr(&arr.len); + let unpadded_size = elem_size * elem_count; + Some(TypeSize::Computed( + quote! { ::fuel_types::canonical::aligned_size(#unpadded_size) }, + )) + } + syn::Type::Tuple(tup) => tup + .elems + .iter() + .map(|type_| try_builtin_sized(type_, true)) + .try_fold(TypeSize::Constant(0), |acc, item| Some(acc + item?)), + syn::Type::Path(p) => { + if p.qself.is_some() { + return None + } + + if !is_sized_primitive(p.path.get_ident()?.to_string().as_str()) { + return None + } + + Some(TypeSize::Computed(if align { + quote! { <#p as ::fuel_types::canonical::SerializedSizeFixed>::SIZE_STATIC } + } else { + quote! { + if <#p as ::fuel_types::canonical::Serialize>::UNALIGNED_BYTES { + 1 + } else { + <#p as ::fuel_types::canonical::SerializedSizeFixed>::SIZE_STATIC + } + } + })) + } + _ => { + panic!("Unsized type {:?}", ty); + } + } +} + +fn constsize_fields(fields: &syn::Fields) -> (TokenStream2, TokenStream2) { + let mut sizes: Vec<_> = fields + .iter() + .filter_map(|field| { + let type_ = &field.ty; + if should_skip_field(&field.attrs) { + None + } else if let Some(size_code) = try_builtin_sized(type_, true) { + Some(TypeSize::Computed(quote! { #size_code })) + } else { + Some(TypeSize::Computed(quote! { <#type_>::SIZE_STATIC })) + } + }) + .collect(); + sizes.sort_by(|a, b| a.partial_cmp(b).unwrap()); + let static_size: TokenStream2 = sizes.into_iter().fold(quote! { 0 }, |acc, item| { + quote! { + ::fuel_types::canonical::add_sizes(#acc, #item) + } + }); + + let no_dynamic_size: TokenStream2 = fields + .iter() + .filter_map(|field| { + let type_ = &field.ty; + if should_skip_field(&field.attrs) + || try_builtin_sized(type_, false).is_some() + { + return None + } + Some(quote! { + <#type_>::SIZE_NO_DYNAMIC + }) + }) + .fold(quote! { true }, |acc, item| { + quote! { + #acc && #item + } + }); + + (static_size, no_dynamic_size) +} + +/// Derives `Serialize` trait for the given `struct` or `enum`. 
+pub fn serialize_derive(mut s: synstructure::Structure) -> TokenStream2 { + s.add_bounds(synstructure::AddBounds::Fields) + .underscore_const(true); + + match s.ast().data { + syn::Data::Struct(_) => serialize_struct(&s), + syn::Data::Enum(_) => serialize_enum(&s), + _ => panic!("Can't derive `Serialize` for `union`s"), + } +} diff --git a/fuel-derive/src/utils.rs b/fuel-derive/src/utils.rs new file mode 100644 index 0000000000..ddb5adcb2c --- /dev/null +++ b/fuel-derive/src/utils.rs @@ -0,0 +1,47 @@ +#![allow(dead_code)] // This is useful for debugging, so keep it around + +use std::{ + fs, + io, + path::{ + Path, + PathBuf, + }, + process::{ + Command, + Stdio, + }, + sync::OnceLock, +}; + +static WORKSPACE: OnceLock = OnceLock::new(); + +pub fn workspace_dir() -> &'static PathBuf { + WORKSPACE.get_or_init(|| { + let output = std::process::Command::new(env!("CARGO")) + .arg("locate-project") + .arg("--workspace") + .arg("--message-format=plain") + .output() + .unwrap() + .stdout; + let cargo_path = Path::new(std::str::from_utf8(&output).unwrap().trim()); + cargo_path.parent().unwrap().to_path_buf() + }) +} + +pub fn write_and_fmt, S: ToString>(path: P, code: S) -> io::Result<()> { + let path = workspace_dir().join(path); + fs::write(&path, code.to_string())?; + + // Format but ignore errors + if let Ok(mut p) = Command::new("rustfmt") + .arg(&path) + .stderr(Stdio::null()) + .spawn() + { + let _ = p.wait(); + } + + Ok(()) +} diff --git a/fuel-tx/Cargo.toml b/fuel-tx/Cargo.toml index 3a06dc8c7a..b3644e488f 100644 --- a/fuel-tx/Cargo.toml +++ b/fuel-tx/Cargo.toml @@ -11,18 +11,19 @@ repository = { workspace = true } description = "FuelVM transaction." [dependencies] -derivative = { version = "2.2.0", default-features = false, features = ["use_core"] } +derivative = { version = "2.2.0", default-features = false, features = ["use_core"], optional = true } fuel-asm = { workspace = true, default-features = false } fuel-crypto = { workspace = true, default-features = false } -fuel-merkle = { workspace = true, default-features = false } +fuel-merkle = { workspace = true, default-features = false, optional = true } fuel-types = { workspace = true, default-features = false } -itertools = { version = "0.10", default-features = false } -num-integer = { version = "0.1", default-features = false } +itertools = { version = "0.10", default-features = false, optional = true } +num-integer = { version = "0.1", default-features = false, optional = true } +num_enum = { version = "0.7", optional = true } rand = { version = "0.8", default-features = false, features = ["std_rng"], optional = true } serde = { version = "1.0", default-features = false, features = ["alloc", "derive"], optional = true } serde_json = { version = "1.0", default-features = false, features = ["alloc"], optional = true } -strum = "0.24" -strum_macros = "0.24" +strum = { version = "0.24", optional = true } +strum_macros = { version = "0.24", optional = true } [dev-dependencies] bincode = { workspace = true } @@ -39,7 +40,7 @@ rstest = "0.15" [features] default = ["fuel-asm/default", "fuel-crypto/default", "fuel-merkle/default", "fuel-types/default", "std"] -alloc = ["fuel-types/alloc", "itertools/use_alloc"] +alloc = ["fuel-types/alloc", "itertools/use_alloc", "derivative", "fuel-merkle", "num_enum", "num-integer", "strum", "strum_macros"] builder = ["alloc", "internals"] internals = [] random = ["fuel-crypto/random", "fuel-types/random", "rand"] diff --git a/fuel-tx/src/contract.rs b/fuel-tx/src/contract.rs index 
43cb90bbe5..eeb2bbad9d 100644 --- a/fuel-tx/src/contract.rs +++ b/fuel-tx/src/contract.rs @@ -45,6 +45,7 @@ fn next_multiple(x: usize) -> usize { #[derive(Default, Derivative, Clone, PartialEq, Eq, Hash)] #[derivative(Debug)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(fuel_types::canonical::Deserialize, fuel_types::canonical::Serialize)] /// Deployable representation of a contract code. pub struct Contract( #[derivative(Debug(format_with = "fmt_truncated_hex::<16>"))] Vec, diff --git a/fuel-tx/src/lib.rs b/fuel-tx/src/lib.rs index 941960e99b..bb68b2535c 100644 --- a/fuel-tx/src/lib.rs +++ b/fuel-tx/src/lib.rs @@ -99,10 +99,8 @@ pub use transaction::{ }; #[cfg(feature = "std")] -pub use transaction::{ - Signable, - UniqueIdentifier, -}; +pub use transaction::Signable; +pub use transaction::UniqueIdentifier; #[cfg(feature = "alloc")] #[allow(deprecated)] diff --git a/fuel-tx/src/receipt.rs b/fuel-tx/src/receipt.rs index 4db5a76f7f..1f8914367a 100644 --- a/fuel-tx/src/receipt.rs +++ b/fuel-tx/src/receipt.rs @@ -1,25 +1,14 @@ -use crate::{ - receipt::sizes::{ - CallSizesLayout, - LogDataSizesLayout, - LogSizesLayout, - MessageOutSizesLayout, - PanicSizesLayout, - ReturnDataSizesLayout, - ReturnSizesLayout, - RevertSizesLayout, - ScriptResultSizesLayout, - TransferOutSizesLayout, - TransferSizesLayout, - }, - Output, -}; +use crate::Output; use alloc::vec::Vec; use derivative::Derivative; use fuel_asm::PanicInstruction; use fuel_crypto::Hasher; use fuel_types::{ - bytes::SizedBytes, + canonical::{ + Deserialize, + Serialize, + SerializedSizeFixed, + }, fmt_option_truncated_hex, Address, AssetId, @@ -30,26 +19,15 @@ use fuel_types::{ Word, }; -#[cfg(feature = "std")] -mod receipt_std; - mod receipt_repr; mod script_result; -mod sizes; -use receipt_repr::ReceiptRepr; - -use crate::{ - input::message::compute_message_id, - receipt::sizes::{ - BurnSizesLayout, - MintSizesLayout, - }, -}; +use crate::input::message::compute_message_id; pub use script_result::ScriptExecutionResult; #[derive(Clone, Derivative)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[derive(Deserialize, Serialize)] #[derivative(Eq, PartialEq, Hash, Debug)] pub enum Receipt { Call { @@ -80,6 +58,7 @@ pub enum Receipt { is: Word, #[derivative(Debug(format_with = "fmt_option_truncated_hex::<16>"))] #[derivative(PartialEq = "ignore", Hash = "ignore")] + #[canonical(skip)] data: Option>, }, @@ -89,6 +68,7 @@ pub enum Receipt { pc: Word, is: Word, #[derivative(PartialEq = "ignore", Hash = "ignore")] + #[canonical(skip)] contract_id: Option, }, @@ -120,6 +100,7 @@ pub enum Receipt { is: Word, #[derivative(Debug(format_with = "fmt_option_truncated_hex::<16>"))] #[derivative(PartialEq = "ignore", Hash = "ignore")] + #[canonical(skip)] data: Option>, }, @@ -155,6 +136,7 @@ pub enum Receipt { digest: Bytes32, #[derivative(Debug(format_with = "fmt_option_truncated_hex::<16>"))] #[derivative(PartialEq = "ignore", Hash = "ignore")] + #[canonical(skip)] data: Option>, }, Mint { @@ -719,24 +701,6 @@ impl Receipt { _ => None, } } - - fn variant_len_without_data(variant: ReceiptRepr) -> usize { - match variant { - ReceiptRepr::Call => CallSizesLayout::LEN, - ReceiptRepr::Return => ReturnSizesLayout::LEN, - ReceiptRepr::ReturnData => ReturnDataSizesLayout::LEN, - ReceiptRepr::Panic => PanicSizesLayout::LEN, - ReceiptRepr::Revert => RevertSizesLayout::LEN, - ReceiptRepr::Log => LogSizesLayout::LEN, - ReceiptRepr::LogData => LogDataSizesLayout::LEN, - ReceiptRepr::Transfer 
=> TransferSizesLayout::LEN, - ReceiptRepr::TransferOut => TransferOutSizesLayout::LEN, - ReceiptRepr::ScriptResult => ScriptResultSizesLayout::LEN, - ReceiptRepr::MessageOut => MessageOutSizesLayout::LEN, - ReceiptRepr::Mint => MintSizesLayout::LEN, - ReceiptRepr::Burn => BurnSizesLayout::LEN, - } - } } fn trim_contract_id(id: Option<&ContractId>) -> Option<&ContractId> { @@ -749,12 +713,6 @@ fn trim_contract_id(id: Option<&ContractId>) -> Option<&ContractId> { }) } -impl SizedBytes for Receipt { - fn serialized_size(&self) -> usize { - Self::variant_len_without_data(ReceiptRepr::from(self)) - } -} - #[cfg(test)] mod tests { use crate::Receipt; diff --git a/fuel-tx/src/receipt/receipt_std.rs b/fuel-tx/src/receipt/receipt_std.rs deleted file mode 100644 index ed39be9f6f..0000000000 --- a/fuel-tx/src/receipt/receipt_std.rs +++ /dev/null @@ -1,677 +0,0 @@ -use super::{ - Receipt, - ReceiptRepr, -}; - -use fuel_asm::PanicInstruction; -use fuel_types::{ - bytes::{ - self, - SizedBytes, - WORD_SIZE, - }, - MemLayout, - MemLocType, - Word, -}; - -use crate::receipt::{ - script_result::ScriptExecutionResult, - sizes::CallSizes, -}; -use std::io::{ - self, - Write, -}; - -use crate::receipt::sizes::*; - -impl io::Read for Receipt { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - let len = self.serialized_size(); - - if buf.len() < len { - return Err(bytes::eof()) - } - - match self { - Self::Call { - id, - to, - amount, - asset_id, - gas, - param1, - param2, - pc, - is, - } => { - type S = CallSizes; - const LEN: usize = CallSizes::LEN; - let buf: &mut [_; LEN] = buf - .get_mut(..LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Call as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_at(buf, S::layout(S::LAYOUT.to), to); - bytes::store_number_at(buf, S::layout(S::LAYOUT.amount), *amount); - bytes::store_at(buf, S::layout(S::LAYOUT.asset_id), asset_id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.gas), *gas); - bytes::store_number_at(buf, S::layout(S::LAYOUT.param1), *param1); - bytes::store_number_at(buf, S::layout(S::LAYOUT.param2), *param2); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::Return { id, val, pc, is } => { - type S = ReturnSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Return as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.val), *val); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::ReturnData { - id, - ptr, - len, - digest, - pc, - is, - .. 
- } => { - let full_buf = buf; - type S = ReturnDataSizes; - let buf: &mut [_; S::LEN] = full_buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::ReturnData as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.ptr), *ptr); - bytes::store_number_at(buf, S::layout(S::LAYOUT.len), *len); - bytes::store_at(buf, S::layout(S::LAYOUT.digest), digest); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::Panic { - id, reason, pc, is, .. - } => { - type S = PanicSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Panic as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.reason), - Word::from(*reason), - ); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::Revert { id, ra, pc, is } => { - type S = RevertSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Revert as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.ra), *ra); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::Log { - id, - ra, - rb, - rc, - rd, - pc, - is, - } => { - type S = LogSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Log as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.ra), *ra); - bytes::store_number_at(buf, S::layout(S::LAYOUT.rb), *rb); - bytes::store_number_at(buf, S::layout(S::LAYOUT.rc), *rc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.rd), *rd); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::LogData { - id, - ra, - rb, - ptr, - len, - digest, - pc, - is, - .. 
- } => { - let full_buf = buf; - type S = LogDataSizes; - let buf: &mut [_; S::LEN] = full_buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::LogData as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.ra), *ra); - bytes::store_number_at(buf, S::layout(S::LAYOUT.rb), *rb); - bytes::store_number_at(buf, S::layout(S::LAYOUT.ptr), *ptr); - bytes::store_number_at(buf, S::layout(S::LAYOUT.len), *len); - bytes::store_at(buf, S::layout(S::LAYOUT.digest), digest); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::Transfer { - id, - to, - amount, - asset_id, - pc, - is, - } => { - type S = TransferSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Transfer as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_at(buf, S::layout(S::LAYOUT.to), to); - bytes::store_number_at(buf, S::layout(S::LAYOUT.amount), *amount); - bytes::store_at(buf, S::layout(S::LAYOUT.asset_id), asset_id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::TransferOut { - id, - to, - amount, - asset_id, - pc, - is, - } => { - type S = TransferOutSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::TransferOut as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.id), id); - bytes::store_at(buf, S::layout(S::LAYOUT.to), to); - bytes::store_number_at(buf, S::layout(S::LAYOUT.amount), *amount); - bytes::store_at(buf, S::layout(S::LAYOUT.asset_id), asset_id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - - Self::ScriptResult { result, gas_used } => { - type S = ScriptResultSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::ScriptResult as u8, - ); - - let result = Word::from(*result); - bytes::store_number_at(buf, S::layout(S::LAYOUT.result), result); - bytes::store_number_at(buf, S::layout(S::LAYOUT.gas_used), *gas_used); - } - - Self::MessageOut { - sender, - recipient, - amount, - nonce, - len, - digest, - .. 
- } => { - let full_buf = buf; - type S = MessageOutSizes; - let buf: &mut [_; S::LEN] = full_buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::MessageOut as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.sender), sender); - bytes::store_at(buf, S::layout(S::LAYOUT.recipient), recipient); - bytes::store_number_at(buf, S::layout(S::LAYOUT.amount), *amount); - bytes::store_at(buf, S::layout(S::LAYOUT.nonce), nonce); - bytes::store_number_at(buf, S::layout(S::LAYOUT.len), *len); - bytes::store_at(buf, S::layout(S::LAYOUT.digest), digest); - } - Receipt::Mint { - sub_id, - contract_id, - val, - pc, - is, - } => { - type S = MintSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Mint as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.sub_id), sub_id); - bytes::store_at(buf, S::layout(S::LAYOUT.contract_id), contract_id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.val), *val); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - Receipt::Burn { - sub_id, - contract_id, - val, - pc, - is, - } => { - type S = BurnSizes; - let buf: &mut [_; S::LEN] = buf - .get_mut(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - bytes::store_number_at( - buf, - S::layout(S::LAYOUT.repr), - ReceiptRepr::Burn as u8, - ); - - bytes::store_at(buf, S::layout(S::LAYOUT.sub_id), sub_id); - bytes::store_at(buf, S::layout(S::LAYOUT.contract_id), contract_id); - bytes::store_number_at(buf, S::layout(S::LAYOUT.val), *val); - bytes::store_number_at(buf, S::layout(S::LAYOUT.pc), *pc); - bytes::store_number_at(buf, S::layout(S::LAYOUT.is), *is); - } - } - - Ok(len) - } -} - -impl io::Write for Receipt { - fn write(&mut self, full_buf: &[u8]) -> io::Result { - let identifier: &[_; WORD_SIZE] = full_buf - .get(..WORD_SIZE) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - // Safety: buf len is checked - let identifier = bytes::restore_word(bytes::from_array(identifier)); - let identifier = ReceiptRepr::try_from(identifier)?; - - match identifier { - ReceiptRepr::Call => { - type S = CallSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let to = bytes::restore_at(buf, S::layout(S::LAYOUT.to)); - let amount = bytes::restore_word_at(buf, S::layout(S::LAYOUT.amount)); - let asset_id = bytes::restore_at(buf, S::layout(S::LAYOUT.asset_id)); - let gas = bytes::restore_word_at(buf, S::layout(S::LAYOUT.gas)); - let param1 = bytes::restore_word_at(buf, S::layout(S::LAYOUT.param1)); - let param2 = bytes::restore_word_at(buf, S::layout(S::LAYOUT.param2)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - let to = to.into(); - let asset_id = asset_id.into(); - - *self = Self::call(id, to, amount, asset_id, gas, param1, param2, pc, is); - } - - ReceiptRepr::Return => { - type S = ReturnSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let 
val = bytes::restore_word_at(buf, S::layout(S::LAYOUT.val)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - - *self = Self::ret(id, val, pc, is); - } - - ReceiptRepr::ReturnData => { - type S = ReturnDataSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let ptr = bytes::restore_word_at(buf, S::layout(S::LAYOUT.ptr)); - let len = bytes::restore_word_at(buf, S::layout(S::LAYOUT.len)); - let digest = bytes::restore_at(buf, S::layout(S::LAYOUT.digest)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - let digest = digest.into(); - - *self = Self::return_data_with_len(id, ptr, len, digest, pc, is, None); - } - - ReceiptRepr::Panic => { - type S = PanicSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let reason = bytes::restore_word_at(buf, S::layout(S::LAYOUT.reason)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - - *self = Self::panic(id, PanicInstruction::from(reason), pc, is); - } - - ReceiptRepr::Revert => { - type S = RevertSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let ra = bytes::restore_word_at(buf, S::layout(S::LAYOUT.ra)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - - *self = Self::revert(id, ra, pc, is); - } - - ReceiptRepr::Log => { - type S = LogSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let ra = bytes::restore_word_at(buf, S::layout(S::LAYOUT.ra)); - let rb = bytes::restore_word_at(buf, S::layout(S::LAYOUT.rb)); - let rc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.rc)); - let rd = bytes::restore_word_at(buf, S::layout(S::LAYOUT.rd)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - - *self = Self::log(id, ra, rb, rc, rd, pc, is); - } - - ReceiptRepr::LogData => { - type S = LogDataSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let ra = bytes::restore_word_at(buf, S::layout(S::LAYOUT.ra)); - let rb = bytes::restore_word_at(buf, S::layout(S::LAYOUT.rb)); - let ptr = bytes::restore_word_at(buf, S::layout(S::LAYOUT.ptr)); - let len = bytes::restore_word_at(buf, S::layout(S::LAYOUT.len)); - let digest = bytes::restore_at(buf, S::layout(S::LAYOUT.digest)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - let digest = digest.into(); - - *self = - Self::log_data_with_len(id, ra, rb, ptr, len, digest, pc, is, None); - } - - 
ReceiptRepr::Transfer => { - type S = TransferSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let to = bytes::restore_at(buf, S::layout(S::LAYOUT.to)); - let amount = bytes::restore_word_at(buf, S::layout(S::LAYOUT.amount)); - let asset_id = bytes::restore_at(buf, S::layout(S::LAYOUT.asset_id)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - let to = to.into(); - let asset_id = asset_id.into(); - - *self = Self::transfer(id, to, amount, asset_id, pc, is); - } - - ReceiptRepr::TransferOut => { - type S = TransferOutSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let id = bytes::restore_at(buf, S::layout(S::LAYOUT.id)); - let to = bytes::restore_at(buf, S::layout(S::LAYOUT.to)); - let amount = bytes::restore_word_at(buf, S::layout(S::LAYOUT.amount)); - let asset_id = bytes::restore_at(buf, S::layout(S::LAYOUT.asset_id)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let id = id.into(); - let to = to.into(); - let asset_id = asset_id.into(); - - *self = Self::transfer_out(id, to, amount, asset_id, pc, is); - } - - ReceiptRepr::ScriptResult => { - type S = ScriptResultSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - let result = bytes::restore_word_at(buf, S::layout(S::LAYOUT.result)); - let gas_used = bytes::restore_word_at(buf, S::layout(S::LAYOUT.gas_used)); - - let result = ScriptExecutionResult::from(result); - - *self = Self::script_result(result, gas_used); - } - - ReceiptRepr::MessageOut => { - type S = MessageOutSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let sender = bytes::restore_at(buf, S::layout(S::LAYOUT.sender)); - let recipient = bytes::restore_at(buf, S::layout(S::LAYOUT.recipient)); - let amount = bytes::restore_word_at(buf, S::layout(S::LAYOUT.amount)); - let nonce = bytes::restore_at(buf, S::layout(S::LAYOUT.nonce)); - let len = bytes::restore_word_at(buf, S::layout(S::LAYOUT.len)); - let digest = bytes::restore_at(buf, S::layout(S::LAYOUT.digest)); - - let sender = sender.into(); - let recipient = recipient.into(); - let nonce = nonce.into(); - let digest = digest.into(); - - *self = Self::message_out_with_len( - sender, recipient, amount, nonce, len, digest, None, - ); - } - ReceiptRepr::Mint => { - type S = MintSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let sub_id = bytes::restore_at(buf, S::layout(S::LAYOUT.sub_id)); - let contract_id = - bytes::restore_at(buf, S::layout(S::LAYOUT.contract_id)); - let val = bytes::restore_word_at(buf, S::layout(S::LAYOUT.val)); - let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc)); - let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is)); - - let sub_id = sub_id.into(); - let contract_id = contract_id.into(); - - *self = Self::mint(sub_id, contract_id, val, pc, is); - } - ReceiptRepr::Burn => { - type S = BurnSizes; - let buf: &[_; S::LEN] = full_buf - .get(..S::LEN) - .and_then(|slice| slice.try_into().ok()) - .ok_or(bytes::eof())?; - - let sub_id = 
-                let sub_id = bytes::restore_at(buf, S::layout(S::LAYOUT.sub_id));
-                let contract_id =
-                    bytes::restore_at(buf, S::layout(S::LAYOUT.contract_id));
-                let val = bytes::restore_word_at(buf, S::layout(S::LAYOUT.val));
-                let pc = bytes::restore_word_at(buf, S::layout(S::LAYOUT.pc));
-                let is = bytes::restore_word_at(buf, S::layout(S::LAYOUT.is));
-
-                let sub_id = sub_id.into();
-                let contract_id = contract_id.into();
-
-                *self = Self::burn(sub_id, contract_id, val, pc, is);
-            }
-        }
-
-        let n = self.serialized_size();
-        Ok(n)
-    }
-
-    fn flush(&mut self) -> io::Result<()> {
-        Ok(())
-    }
-}
-
-impl bytes::Deserializable for Receipt {
-    fn from_bytes(bytes: &[u8]) -> io::Result<Self> {
-        let mut instance = Self::ret(Default::default(), 0, 0, 0);
-
-        // We are sure that all needed bytes are written or error would happen.
-        // unused let is here to silence clippy warning for this check.
-        let _ = instance.write(bytes)?;
-
-        Ok(instance)
-    }
-}
diff --git a/fuel-tx/src/receipt/script_result.rs b/fuel-tx/src/receipt/script_result.rs
index d0e739fe12..568e94439c 100644
--- a/fuel-tx/src/receipt/script_result.rs
+++ b/fuel-tx/src/receipt/script_result.rs
@@ -2,6 +2,8 @@ use fuel_types::Word;
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[derive(fuel_types::canonical::Deserialize, fuel_types::canonical::Serialize)]
+#[repr(u64)]
 pub enum ScriptExecutionResult {
     Success,
     Revert,
diff --git a/fuel-tx/src/receipt/sizes.rs b/fuel-tx/src/receipt/sizes.rs
deleted file mode 100644
index 6f7b7a43e5..0000000000
--- a/fuel-tx/src/receipt/sizes.rs
+++ /dev/null
@@ -1,160 +0,0 @@
-use fuel_asm::Word;
-use fuel_types::{
-    bytes::WORD_SIZE,
-    mem_layout,
-    Address,
-    AssetId,
-    Bytes32,
-    ContractId,
-    Nonce,
-};
-
-pub struct CallSizes;
-mem_layout!(
-    CallSizesLayout for CallSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    to: ContractId = {ContractId::LEN},
-    amount: Word = WORD_SIZE,
-    asset_id: AssetId = {AssetId::LEN},
-    gas: Word = WORD_SIZE,
-    param1: Word = WORD_SIZE,
-    param2: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct ReturnSizes;
-mem_layout!(
-    ReturnSizesLayout for ReturnSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    val: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct ReturnDataSizes;
-mem_layout!(
-    ReturnDataSizesLayout for ReturnDataSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    ptr: Word = WORD_SIZE,
-    len: Word = WORD_SIZE,
-    digest: Bytes32 = {Bytes32::LEN},
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct PanicSizes;
-mem_layout!(
-    PanicSizesLayout for PanicSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    reason: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct RevertSizes;
-mem_layout!(
-    RevertSizesLayout for RevertSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    ra: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct LogSizes;
-mem_layout!(
-    LogSizesLayout for LogSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    ra: Word = WORD_SIZE,
-    rb: Word = WORD_SIZE,
-    rc: Word = WORD_SIZE,
-    rd: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct LogDataSizes;
-mem_layout!(
-    LogDataSizesLayout for LogDataSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    ra: Word = WORD_SIZE,
-    rb: Word = WORD_SIZE,
-    ptr: Word = WORD_SIZE,
-    len: Word = WORD_SIZE,
-    digest: Bytes32 = {Bytes32::LEN},
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct TransferSizes;
-mem_layout!(
-    TransferSizesLayout for TransferSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    to: ContractId = {ContractId::LEN},
-    amount: Word = WORD_SIZE,
-    asset_id: AssetId = {AssetId::LEN},
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct TransferOutSizes;
-mem_layout!(
-    TransferOutSizesLayout for TransferOutSizes
-    repr: u8 = WORD_SIZE,
-    id: ContractId = {ContractId::LEN},
-    to: Address = {Address::LEN},
-    amount: Word = WORD_SIZE,
-    asset_id: AssetId = {AssetId::LEN},
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct ScriptResultSizes;
-mem_layout!(
-    ScriptResultSizesLayout for ScriptResultSizes
-    repr: u8 = WORD_SIZE,
-    result: Word = WORD_SIZE,
-    gas_used: Word = WORD_SIZE
-);
-
-pub struct MessageOutSizes;
-mem_layout!(
-    MessageOutSizesLayout for MessageOutSizes
-    repr: u8 = WORD_SIZE,
-    sender: Address = {Address::LEN},
-    recipient: Address = {Address::LEN},
-    amount: Word = WORD_SIZE,
-    nonce: Nonce = {Nonce::LEN},
-    len: Word = WORD_SIZE,
-    digest: Bytes32 = {Bytes32::LEN}
-);
-
-pub struct MintSizes;
-mem_layout!(
-    MintSizesLayout for MintSizes
-    repr: u8 = WORD_SIZE,
-    sub_id: Bytes32 = {Bytes32::LEN},
-    contract_id: ContractId = {ContractId::LEN},
-    val: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
-
-pub struct BurnSizes;
-mem_layout!(
-    BurnSizesLayout for BurnSizes
-    repr: u8 = WORD_SIZE,
-    sub_id: Bytes32 = {Bytes32::LEN},
-    contract_id: ContractId = {ContractId::LEN},
-    val: Word = WORD_SIZE,
-    pc: Word = WORD_SIZE,
-    is: Word = WORD_SIZE
-);
diff --git a/fuel-tx/src/tests/bytes.rs b/fuel-tx/src/tests/bytes.rs
index 8bf2471f5c..1f9a81d853 100644
--- a/fuel-tx/src/tests/bytes.rs
+++ b/fuel-tx/src/tests/bytes.rs
@@ -10,6 +10,11 @@ use fuel_tx_test_helpers::{
 };
 use fuel_types::{
     bytes,
+    canonical::{
+        Deserialize,
+        Serialize,
+        SerializedSize,
+    },
     Immediate24,
 };
 use rand::{
@@ -25,31 +30,19 @@ use fuel_tx::field::{
     Script,
     ScriptData,
 };
-use std::{
-    fmt,
-    io::{
-        self,
-        Read,
-        Write,
-    },
-};
+use std::fmt;
 use strum::IntoEnumIterator;
 
 pub fn assert_encoding_correct<T>(data: &[T])
 where
-    T: Read
-        + Write
+    T: Serialize
+        + Deserialize
         + fmt::Debug
         + Clone
         + PartialEq
-        + bytes::SizedBytes
-        + bytes::SerializableVec
-        + bytes::Deserializable
         + serde::Serialize
         + for<'a> serde::Deserialize<'a>,
 {
-    let mut buffer;
-
     for data in data.iter() {
         let d_s = bincode::serialize(&data).expect("Failed to serialize data");
         // Safety: bincode/serde fails to understand the elision so this is a cheap way to
@@ -59,49 +52,11 @@ where
         assert_eq!(&d_s, data);
 
-        let mut d = data.clone();
-
-        let d_bytes = data.clone().to_bytes();
-        let d_p = T::from_bytes(d_bytes.as_slice()).expect("Failed to deserialize T");
-
-        assert_eq!(d, d_p);
-
-        let mut d_p = data.clone();
-
-        buffer = vec![0u8; 2048];
-        let read_size = d.read(buffer.as_mut_slice()).expect("Failed to read");
-        let write_size = d_p.write(buffer.as_slice()).expect("Failed to write");
-
-        // Simple RW assertion
-        assert_eq!(d, d_p);
-        assert_eq!(read_size, write_size);
+        let mut d_bytes = Vec::new();
+        data.clone().encode(&mut d_bytes).expect("Failed to encode");
+        let d_p = T::decode(&mut &d_bytes[..]).expect("Failed to deserialize T");
 
-        buffer = vec![0u8; read_size];
-
-        // Minimum size buffer assertion
-        let _ = d.read(buffer.as_mut_slice()).expect("Failed to read");
-        let _ = d_p.write(buffer.as_slice()).expect("Failed to write");
-        assert_eq!(d, d_p);
-        assert_eq!(d_bytes.as_slice(), buffer.as_slice());
-
-        // No panic assertion
-        loop {
-            buffer.pop();
-
-            let err = d
-                .read(buffer.as_mut_slice())
-                .expect_err("Insufficient buffer should fail!");
-            assert_eq!(io::ErrorKind::UnexpectedEof, err.kind());
-
-            let err = d_p
-                .write(buffer.as_slice())
-                .expect_err("Insufficient buffer should fail!");
-            assert_eq!(io::ErrorKind::UnexpectedEof, err.kind());
-
-            if buffer.is_empty() {
-                break
-            }
-        }
+        assert_eq!(*data, d_p);
     }
 }
 
@@ -509,7 +464,6 @@ fn create_input_data_offset() {
         predicate_data,
     );
 
-    let mut buffer = vec![0u8; 4096];
    for storage_slot in storage_slots.iter() {
         for inputs in inputs.iter() {
             for outputs in outputs.iter() {
@@ -522,7 +476,7 @@
                 let input_message_idx = inputs.len();
                 inputs.push(input_message.clone());
 
-                let mut tx = Transaction::create(
+                let tx = Transaction::create(
                     gas_price,
                     gas_limit,
                     maturity,
@@ -536,10 +490,7 @@
                 let tx_p = tx.clone();
 
-                buffer.iter_mut().for_each(|b| *b = 0x00);
-                let _ = tx
-                    .read(buffer.as_mut_slice())
-                    .expect("Failed to serialize input");
+                let bytes = tx.to_bytes();
 
                 let (offset, len) = tx
                     .inputs_predicate_offset_at(input_coin_idx)
@@ -553,7 +504,7 @@
                 assert_eq!(offset, offset_p);
                 assert_eq!(
                     predicate.as_slice(),
-                    &buffer[offset..offset + len][..predicate.len()]
+                    &bytes[offset..offset + len][..predicate.len()]
                 );
 
                 let (offset, len) = tx
@@ -568,7 +519,7 @@
                 assert_eq!(offset, offset_p);
                 assert_eq!(
                     predicate.as_slice(),
-                    &buffer[offset..offset + len][..predicate.len()]
+                    &bytes[offset..offset + len][..predicate.len()]
                 );
             }
         }
@@ -636,7 +587,6 @@ fn script_input_coin_data_offset() {
         predicate_data,
     );
 
-    let mut buffer = vec![0u8; 4096];
     for script in script.iter() {
         for script_data in script_data.iter() {
             for inputs in inputs.iter() {
@@ -646,7 +596,7 @@
                 let offset = inputs.len();
                 inputs.push(input_coin.clone());
 
-                let mut tx = Transaction::script(
+                let tx = Transaction::script(
                     gas_price,
                     gas_limit,
                     maturity,
@@ -661,16 +611,12 @@
                 tx_p.precompute(&Default::default())
                     .expect("Should be able to calculate cache");
 
-                buffer.iter_mut().for_each(|b| *b = 0x00);
-
-                let _ = tx
-                    .read(buffer.as_mut_slice())
-                    .expect("Failed to serialize input");
+                let bytes = tx.to_bytes();
 
                 let script_offset = tx.script_offset();
                 assert_eq!(
                     script.as_slice(),
-                    &buffer[script_offset..script_offset + script.len()]
+                    &bytes[script_offset..script_offset + script.len()]
                 );
 
                 let script_data_offset = tx.script_data_offset();
@@ -680,7 +626,7 @@
                 assert_eq!(script_data_offset, script_data_offset_p);
                 assert_eq!(
                     script_data.as_slice(),
-                    &buffer[script_data_offset
+                    &bytes[script_data_offset
                         ..script_data_offset + script_data.len()]
                 );
 
@@ -693,7 +639,7 @@
 
                 assert_eq!(
                     predicate.as_slice(),
-                    &buffer[offset..offset + predicate.len()]
+                    &bytes[offset..offset + predicate.len()]
                 );
             }
         }
diff --git a/fuel-tx/src/tests/offset.rs b/fuel-tx/src/tests/offset.rs
index c236cde346..7c097b4f07 100644
--- a/fuel-tx/src/tests/offset.rs
+++ b/fuel-tx/src/tests/offset.rs
@@ -12,9 +12,9 @@ use fuel_tx::{
 };
 use fuel_tx_test_helpers::TransactionFactory;
 use fuel_types::{
-    bytes::{
-        Deserializable,
-        SerializableVec,
+    canonical::{
+        Deserialize,
+        SerializedSize,
     },
     ChainId,
 };
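// Sketch only, under the assumption that `to_bytes`/`from_bytes` come from the
// canonical traits imported in the hunk above and that the `field::Inputs`
// accessors expose `inputs()` and `inputs_offset_at()` as exercised by the
// offset tests below: serialize the whole transaction once, then decode each
// part straight out of the byte slice at its reported offset.
use fuel_tx::{field::Inputs, Input, Script};
use fuel_types::canonical::{Deserialize, Serialize, SerializedSize};

fn assert_input_offsets(tx: &Script) {
    let bytes = tx.to_bytes();
    for (idx, input) in tx.inputs().iter().enumerate() {
        let offset = tx.inputs_offset_at(idx).expect("input offset");
        let decoded = Input::from_bytes(&bytes[offset..]).expect("decode input at offset");
        assert_eq!(input, &decoded);
    }
}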
@@ -329,7 +329,7 @@ fn tx_offset_create() {
     // pick a seed that, with low number of cases, will cover everything.
     TransactionFactory::<_, Create>::from_seed(1295)
         .take(number_cases)
-        .for_each(|(mut tx, _)| {
+        .for_each(|(tx, _)| {
             let bytes = tx.to_bytes();
 
             cases.salt = true;
@@ -398,7 +398,7 @@ fn tx_offset_script() {
     // pick a seed that, with low number of cases, will cover everything.
     TransactionFactory::<_, Script>::from_seed(1295)
         .take(number_cases)
-        .for_each(|(mut tx, _)| {
+        .for_each(|(tx, _)| {
             let bytes = tx.to_bytes();
             common_parts_create_and_script(&tx, &bytes, &mut cases);
         });
@@ -437,7 +437,7 @@ fn tx_offset_mint() {
     // pick a seed that, with low number of cases, will cover everything.
     TransactionFactory::<_, Mint>::from_seed(1295)
         .take(number_cases)
-        .for_each(|mut tx| {
+        .for_each(|tx| {
             let bytes = tx.to_bytes();
 
             let ofs = tx.tx_pointer_offset();
@@ -475,34 +475,34 @@ fn iow_offset() {
     tx.inputs().iter().enumerate().for_each(|(x, i)| {
         let offset = tx.inputs_offset_at(x).unwrap();
         let offset_p = tx_p.inputs_offset_at(x).unwrap();
+        assert_eq!(offset, offset_p);
 
         let input = Input::from_bytes(&bytes[offset..])
             .expect("Failed to deserialize input!");
 
         assert_eq!(i, &input);
-        assert_eq!(offset, offset_p);
     });
 
     tx.outputs().iter().enumerate().for_each(|(x, o)| {
         let offset = tx.outputs_offset_at(x).unwrap();
         let offset_p = tx_p.outputs_offset_at(x).unwrap();
+        assert_eq!(offset, offset_p);
 
         let output = Output::from_bytes(&bytes[offset..])
             .expect("Failed to deserialize output!");
 
         assert_eq!(o, &output);
-        assert_eq!(offset, offset_p);
     });
 
     tx.witnesses().iter().enumerate().for_each(|(x, w)| {
         let offset = tx.witnesses_offset_at(x).unwrap();
         let offset_p = tx_p.witnesses_offset_at(x).unwrap();
+        assert_eq!(offset, offset_p);
 
         let witness = Witness::from_bytes(&bytes[offset..])
             .expect("Failed to deserialize witness!");
 
         assert_eq!(w, &witness);
-        assert_eq!(offset, offset_p);
     });
 
     let offset = tx.receipts_root_offset();
diff --git a/fuel-tx/src/tests/valid_cases/transaction.rs b/fuel-tx/src/tests/valid_cases/transaction.rs
index 7e997b6dae..d69b34803d 100644
--- a/fuel-tx/src/tests/valid_cases/transaction.rs
+++ b/fuel-tx/src/tests/valid_cases/transaction.rs
@@ -9,6 +9,10 @@ use super::{
 };
 use fuel_crypto::SecretKey;
 use fuel_tx::*;
 use fuel_tx_test_helpers::generate_bytes;
+use fuel_types::canonical::{
+    Deserialize,
+    SerializedSize,
+};
 use rand::{
     rngs::StdRng,
     Rng,
@@ -16,10 +20,7 @@ use rand::{
     SeedableRng,
 };
 
-use std::{
-    cmp,
-    io::Write,
-};
+use core::cmp;
 
 #[test]
 fn gas_limit() {
@@ -767,14 +768,11 @@ fn create() {
         .check(block_height, &test_params())
         .expect("Failed to validate the transaction");
 
-    let mut slot_data = [0u8; 64];
-    let mut slot = StorageSlot::default();
-
     let storage_slots = (0..CONTRACT_PARAMS.max_storage_slots)
         .map(|i| {
-            slot_data[..8].copy_from_slice(&i.to_be_bytes());
-            let _ = slot.write(&slot_data).unwrap();
-            slot.clone()
+            let mut slot_data = StorageSlot::default().to_bytes();
+            slot_data[..8].copy_from_slice(&i.to_be_bytes()); // Force ordering
+            StorageSlot::from_bytes(&slot_data).unwrap()
         })
         .collect::<Vec<StorageSlot>>();
diff --git a/fuel-tx/src/transaction.rs b/fuel-tx/src/transaction.rs
index d374c2b7df..cc530fed5d 100644
--- a/fuel-tx/src/transaction.rs
+++ b/fuel-tx/src/transaction.rs
@@ -1,6 +1,6 @@
 use fuel_crypto::PublicKey;
 use fuel_types::{
-    bytes::SizedBytes,
+    canonical::SerializedSizeFixed,
     Address,
     AssetId,
     BlockHeight,
@@ -22,12 +22,8 @@
 mod repr;
 mod types;
 mod validity;
 
-#[cfg(feature = "std")]
 mod id;
-#[cfg(feature = "std")]
-mod txio;
-
 pub mod consensus_parameters;
 
 pub use consensus_parameters::{
@@ -73,11 +69,11 @@ use crate::input::{
 };
 #[cfg(feature = "std")]
 pub use fuel_types::ChainId;
+
 #[cfg(feature = "std")]
-pub use id::{
-    Signable,
-    UniqueIdentifier,
-};
+pub use id::Signable;
+
+pub use id::UniqueIdentifier;
 
 /// Identification of transaction (also called transaction hash)
 pub type TxId = Bytes32;
@@ -85,6 +81,8 @@ pub type TxId = Bytes32;
 /// The fuel transaction entity .
 #[derive(Debug, Clone, PartialEq, Eq, Hash, strum_macros::EnumCount)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[derive(fuel_types::canonical::Serialize, fuel_types::canonical::Deserialize)]
+#[canonical(inner_discriminant = TransactionRepr)]
 pub enum Transaction {
     Script(Script),
     Create(Create),
@@ -432,16 +430,6 @@ impl Executable for T {}
 
-impl SizedBytes for Transaction {
-    fn serialized_size(&self) -> usize {
-        match self {
-            Self::Script(script) => script.serialized_size(),
-            Self::Create(create) => create.serialized_size(),
-            Self::Mint(mint) => mint.serialized_size(),
-        }
-    }
-}
-
-impl From