From 9112b926aae75c3beec0024e80ddaec0e252af79 Mon Sep 17 00:00:00 2001
From: Pavlo Khrystenko
Date: Tue, 5 Nov 2024 13:17:21 +0100
Subject: [PATCH] fmt
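
Formatting-only pass: short-circuit operators (`&&`, `||`) and `|` pattern
alternatives move from the start of a continuation line to the end of the
preceding line, and match arms whose body is a single expression drop their
surrounding braces. The stray `git.pach` file that had been committed by
accident is removed. No functional change is intended.

For example, in derive/src/decode.rs the union arm goes from

    Data::Union(_) => {
        Error::new(Span::call_site(), "Union types are not supported.").to_compile_error()
    },

to

    Data::Union(_) =>
        Error::new(Span::call_site(), "Union types are not supported.").to_compile_error(),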
---
 derive/src/decode.rs          | 15 +++---
 derive/src/lib.rs             | 12 ++---
 derive/src/max_encoded_len.rs | 10 ++--
 derive/src/trait_bounds.rs    | 30 +++++------
 derive/src/utils.rs           | 42 +++++++--------
 fuzzer/src/main.rs            |  5 +-
 git.pach                      | 97 -----------------------------------
 src/codec.rs                  | 20 ++++----
 src/compact.rs                | 15 +++---
 9 files changed, 66 insertions(+), 180 deletions(-)
 delete mode 100644 git.pach
diff --git a/derive/src/decode.rs b/derive/src/decode.rs
index e7431625..593305c2 100644
--- a/derive/src/decode.rs
+++ b/derive/src/decode.rs
@@ -100,9 +100,8 @@ pub fn quote(
}
}
},
- Data::Union(_) => {
- Error::new(Span::call_site(), "Union types are not supported.").to_compile_error()
- },
+ Data::Union(_) =>
+ Error::new(Span::call_site(), "Union types are not supported.").to_compile_error(),
}
}
@@ -121,8 +120,8 @@ pub fn quote_decode_into(
let fields = match data {
Data::Struct(syn::DataStruct {
fields:
- Fields::Named(syn::FieldsNamed { named: fields, .. })
- | Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }),
+ Fields::Named(syn::FieldsNamed { named: fields, .. }) |
+ Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }),
..
}) => fields,
_ => return None,
@@ -134,9 +133,9 @@ pub fn quote_decode_into(
// Bail if there are any extra attributes which could influence how the type is decoded.
if fields.iter().any(|field| {
- utils::get_encoded_as_type(field).is_some()
- || utils::is_compact(field)
- || utils::should_skip(&field.attrs)
+ utils::get_encoded_as_type(field).is_some() ||
+ utils::is_compact(field) ||
+ utils::should_skip(&field.attrs)
}) {
return None;
}
diff --git a/derive/src/lib.rs b/derive/src/lib.rs
index 929d7081..8ba6d3de 100644
--- a/derive/src/lib.rs
+++ b/derive/src/lib.rs
@@ -367,19 +367,17 @@ pub fn compact_as_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStr
let constructor = quote!( #name(#( #recurse, )*));
(&field.ty, quote!(&self.#id), constructor)
},
- _ => {
+ _ =>
return Error::new(
data.fields.span(),
"Only structs with a single non-skipped field can derive CompactAs",
)
.to_compile_error()
- .into()
- },
- },
- Data::Enum(syn::DataEnum { enum_token: syn::token::Enum { span }, .. })
- | Data::Union(syn::DataUnion { union_token: syn::token::Union { span }, .. }) => {
- return Error::new(span, "Only structs can derive CompactAs").to_compile_error().into()
+ .into(),
},
+ Data::Enum(syn::DataEnum { enum_token: syn::token::Enum { span }, .. }) |
+ Data::Union(syn::DataUnion { union_token: syn::token::Union { span }, .. }) =>
+ return Error::new(span, "Only structs can derive CompactAs").to_compile_error().into(),
};
let impl_block = quote! {
diff --git a/derive/src/max_encoded_len.rs b/derive/src/max_encoded_len.rs
index 4bb9099b..b9bcf580 100644
--- a/derive/src/max_encoded_len.rs
+++ b/derive/src/max_encoded_len.rs
@@ -66,12 +66,10 @@ pub fn derive_max_encoded_len(input: proc_macro::TokenStream) -> proc_macro::Tok
/// generate an expression to sum up the max encoded length from several fields
fn fields_length_expr(fields: &Fields, crate_path: &syn::Path) -> proc_macro2::TokenStream {
let fields_iter: Box<dyn Iterator<Item = &syn::Field>> = match fields {
- Fields::Named(ref fields) => {
- Box::new(fields.named.iter().filter(|field| !should_skip(&field.attrs)))
- },
- Fields::Unnamed(ref fields) => {
- Box::new(fields.unnamed.iter().filter(|field| !should_skip(&field.attrs)))
- },
+ Fields::Named(ref fields) =>
+ Box::new(fields.named.iter().filter(|field| !should_skip(&field.attrs))),
+ Fields::Unnamed(ref fields) =>
+ Box::new(fields.unnamed.iter().filter(|field| !should_skip(&field.attrs))),
Fields::Unit => Box::new(std::iter::empty()),
};
// expands to an expression like
diff --git a/derive/src/trait_bounds.rs b/derive/src/trait_bounds.rs
index b5930228..40cb97db 100644
--- a/derive/src/trait_bounds.rs
+++ b/derive/src/trait_bounds.rs
@@ -121,9 +121,8 @@ pub fn add(
generics.make_where_clause().predicates.extend(bounds);
return Ok(());
},
- Some(CustomTraitBound::SkipTypeParams { type_names, .. }) => {
- type_names.into_iter().collect::<Vec<_>>()
- },
+ Some(CustomTraitBound::SkipTypeParams { type_names, .. }) =>
+ type_names.into_iter().collect::<Vec<_>>(),
None => Vec::new(),
};
@@ -190,9 +189,9 @@ fn get_types_to_add_trait_bound(
Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect())
} else {
let needs_codec_bound = |f: &syn::Field| {
- !utils::is_compact(f)
- && utils::get_encoded_as_type(f).is_none()
- && !utils::should_skip(&f.attrs)
+ !utils::is_compact(f) &&
+ utils::get_encoded_as_type(f).is_none() &&
+ !utils::should_skip(&f.attrs)
};
let res = collect_types(data, needs_codec_bound)?
.into_iter()
@@ -223,10 +222,9 @@ fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Resu
let types = match *data {
Data::Struct(ref data) => match &data.fields {
- | Fields::Named(FieldsNamed { named: fields, .. })
- | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => {
- fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect()
- },
+ | Fields::Named(FieldsNamed { named: fields, .. }) |
+ Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
+ fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(),
Fields::Unit => Vec::new(),
},
@@ -236,18 +234,16 @@ fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Resu
.iter()
.filter(|variant| !utils::should_skip(&variant.attrs))
.flat_map(|variant| match &variant.fields {
- | Fields::Named(FieldsNamed { named: fields, .. })
- | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => {
- fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect()
- },
+ | Fields::Named(FieldsNamed { named: fields, .. }) |
+ Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
+ fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(),
Fields::Unit => Vec::new(),
})
.collect(),
- Data::Union(ref data) => {
- return Err(Error::new(data.union_token.span(), "Union types are not supported."))
- },
+ Data::Union(ref data) =>
+ return Err(Error::new(data.union_token.span(), "Union types are not supported.")),
};
Ok(types)
diff --git a/derive/src/utils.rs b/derive/src/utils.rs
index ebc0c0aa..cd3d5375 100644
--- a/derive/src/utils.rs
+++ b/derive/src/utils.rs
@@ -346,17 +346,16 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> {
match input.data {
Data::Struct(ref data) => match &data.fields {
- | Fields::Named(FieldsNamed { named: fields, .. })
- | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => {
+ | Fields::Named(FieldsNamed { named: fields, .. }) |
+ Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
for field in fields {
for attr in &field.attrs {
check_field_attribute(attr)?;
}
- }
- },
+ },
Fields::Unit => (),
},
- Data::Enum(ref data) => {
+ Data::Enum(ref data) =>
for variant in data.variants.iter() {
for attr in &variant.attrs {
check_variant_attribute(attr)?;
@@ -366,8 +365,7 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> {
check_field_attribute(attr)?;
}
}
- }
- },
+ },
Data::Union(_) => (),
}
Ok(())
@@ -375,10 +373,10 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> {
// Check if the attribute is `#[allow(..)]`, `#[deny(..)]`, `#[forbid(..)]` or `#[warn(..)]`.
pub fn is_lint_attribute(attr: &Attribute) -> bool {
- attr.path().is_ident("allow")
- || attr.path().is_ident("deny")
- || attr.path().is_ident("forbid")
- || attr.path().is_ident("warn")
+ attr.path().is_ident("allow") ||
+ attr.path().is_ident("deny") ||
+ attr.path().is_ident("forbid") ||
+ attr.path().is_ident("warn")
}
// Ensure a field is decorated only with the following attributes:
@@ -403,11 +401,10 @@ fn check_field_attribute(attr: &Attribute) -> syn::Result<()> {
path,
value: Expr::Lit(ExprLit { lit: Lit::Str(lit_str), .. }),
..
- }) if path.get_ident().map_or(false, |i| i == "encoded_as") => {
+ }) if path.get_ident().map_or(false, |i| i == "encoded_as") =>
TokenStream::from_str(&lit_str.value())
.map(|_| ())
- .map_err(|_e| syn::Error::new(lit_str.span(), "Invalid token stream"))
- },
+ .map_err(|_e| syn::Error::new(lit_str.span(), "Invalid token stream")),
elt => Err(syn::Error::new(elt.span(), field_error)),
}
@@ -454,21 +451,20 @@ fn check_top_attribute(attr: &Attribute) -> syn::Result<()> {
`#[codec(decode_bound(T: Decode))]`, \
`#[codec(decode_bound_with_mem_tracking_bound(T: DecodeWithMemTracking))]` or \
`#[codec(mel_bound(T: MaxEncodedLen))]` are accepted as top attribute";
- if attr.path().is_ident("codec")
- && attr.parse_args::<CustomTraitBound<encode_bound::keyword>>().is_err()
- && attr.parse_args::<CustomTraitBound<decode_bound::keyword>>().is_err()
- && attr.parse_args::<CustomTraitBound<decode_with_mem_tracking_bound::keyword>>().is_err()
- && attr.parse_args::<CustomTraitBound<mel_bound::keyword>>().is_err()
- && codec_crate_path_inner(attr).is_none()
+ if attr.path().is_ident("codec") &&
+ attr.parse_args::<CustomTraitBound<encode_bound::keyword>>().is_err() &&
+ attr.parse_args::<CustomTraitBound<decode_bound::keyword>>().is_err() &&
+ attr.parse_args::<CustomTraitBound<decode_with_mem_tracking_bound::keyword>>().is_err() &&
+ attr.parse_args::<CustomTraitBound<mel_bound::keyword>>().is_err() &&
+ codec_crate_path_inner(attr).is_none()
{
let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
if nested.len() != 1 {
return Err(syn::Error::new(attr.meta.span(), top_error));
}
match nested.first().expect("Just checked that there is one item; qed") {
- Meta::Path(path) if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") => {
- Ok(())
- },
+ Meta::Path(path) if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") =>
+ Ok(()),
elt => Err(syn::Error::new(elt.span(), top_error)),
}
diff --git a/fuzzer/src/main.rs b/fuzzer/src/main.rs
index 10c48520..198058e6 100644
--- a/fuzzer/src/main.rs
+++ b/fuzzer/src/main.rs
@@ -269,13 +269,12 @@ fn fuzz_encode(data: T
let mut obj: &[u8] = &data.encode();
let decoded = <T as Decode>::decode(&mut obj);
match decoded {
- Ok(object) => {
+ Ok(object) =>
if object != original {
println!("original object: {:?}", original);
println!("decoded object: {:?}", object);
panic!("Original object differs from decoded object")
- }
- },
+ },
Err(e) => {
println!("original object: {:?}", original);
println!("decoding error: {:?}", e);
diff --git a/git.pach b/git.pach
deleted file mode 100644
index 04d2232a..00000000
--- a/git.pach
+++ /dev/null
@@ -1,97 +0,0 @@
-index 2812fc6..c803ff7 100644
---- a/derive/src/utils.rs
-+++ b/derive/src/utils.rs
-@@ -43,45 +43,14 @@ where
- pub fn check_indexes<'a, I: Iterator<Item = &'a Variant>>(values: I) -> syn::Result<()> {
- let mut map: HashMap<u8, Span> = HashMap::new();
- for (i, v) in values.enumerate() {
-- if let Some(index) = find_meta_item(v.attrs.iter(), |meta| {
-- if let Meta::NameValue(ref nv) = meta {
-- if nv.path.is_ident("index") {
-- if let Expr::Lit(ExprLit { lit: Lit::Int(ref v), .. }) = nv.value {
-- let byte = v
-- .base10_parse::<u8>()
-- .expect("Internal error, index attribute must have been checked");
-- return Some(byte);
-- }
-- }
-- }
-- None
-- }) {
-- if let Some(span) = map.insert(index, v.span()) {
-- let mut error = syn::Error::new(v.span(), "Duplicate variant index. qed");
-- error.combine(syn::Error::new(span, "Variant index already defined here."));
-- return Err(error)
-- }
-- } else {
-- match v.discriminant.as_ref() {
-- Some((_, syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(lit_int), .. }))) => {
-- let index = lit_int
-- .base10_parse::<u8>()
-- .expect("Internal error, index attribute must have been checked");
-- if let Some(span) = map.insert(index, v.span()) {
-- let mut error = syn::Error::new(v.span(), "Duplicate variant index. qed");
-- error.combine(syn::Error::new(span, "Variant index already defined here."));
-- return Err(error)
-- }
-- },
-- Some((_, _)) => return Err(syn::Error::new(v.span(), "Invalid discriminant. qed")),
-- None =>
-- if let Some(span) = map.insert(i.try_into().unwrap(), v.span()) {
-- let mut error =
-- syn::Error::new(span, "Custom variant index is duplicated later. qed");
-- error.combine(syn::Error::new(v.span(), "Variant index derived here."));
-- return Err(error)
-- },
-- }
-+ let index = variant_index(v, i)?;
-+ if let Some(span) = map.insert(index, v.span()) {
-+ let mut error = syn::Error::new(
-+ v.span(),
-+ "scale codec error: Invalid variant index, the variant index is duplicated.",
-+ );
-+ error.combine(syn::Error::new(span, "Variant index used here."));
-+ return Err(error)
- }
- }
- Ok(())
-@@ -89,7 +58,7 @@ pub fn check_indexes<'a, I: Iterator<Item = &'a Variant>>(values: I) -> syn:
-
- /// Look for a `#[scale(index = $int)]` attribute on a variant. If no attribute
- /// is found, fall back to the discriminant or just the variant index.
--pub fn variant_index(v: &Variant, index: usize) -> syn::Result<TokenStream> {
-+pub fn variant_index(v: &Variant, index: usize) -> syn::Result<u8> {
- // first look for an attribute
- let codec_index = find_meta_item(v.attrs.iter(), |meta| {
- if let Meta::NameValue(ref nv) = meta {
-@@ -106,13 +75,25 @@ pub fn variant_index(v: &Variant, index: usize) -> syn::Result<TokenStream> {
- None
- });
- if let Some(index) = codec_index {
-- Ok(quote! { #index })
-+ Ok(index)
- } else {
- match v.discriminant.as_ref() {
-- Some((_, expr @ syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(_), .. }))) =>
-- Ok(quote! { #expr }),
-- Some((_, expr)) => Err(syn::Error::new(expr.span(), "Invalid discriminant. qed")),
-- None => Ok(quote! { #index }),
-+ Some((_, syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(v), .. }))) => {
-+ let byte = v
-+ .base10_parse::<u8>()
-+ .expect("scale codec error: Invalid variant index, discriminant doesn't fit u8.");
-+ Ok(byte)
-+ },
-+ Some((_, expr)) => Err(syn::Error::new(
-+ expr.span(),
-+ "scale codec error: Invalid discriminant, only int literal are accepted, e.g. \
-+ `= 32`.",
-+ )),
-+ None => index.try_into().map_err(|_| syn::Error::new(
-+ v.span(),
-+ "scale codec error: Variant index is too large, only 256 variants are \
-+ supported.",
-+ )),
- }
- }
- }
diff --git a/src/codec.rs b/src/codec.rs
index 9307a43e..1dce353a 100644
--- a/src/codec.rs
+++ b/src/codec.rs
@@ -918,16 +918,16 @@ impl<T: Decode, const N: usize> Decode for [T; N] {
) -> Result<DecodeFinished, Error> {
let is_primitive = match <T as Decode>::TYPE_INFO {
| TypeInfo::U8 | TypeInfo::I8 => true,
- | TypeInfo::U16
- | TypeInfo::I16
- | TypeInfo::U32
- | TypeInfo::I32
- | TypeInfo::U64
- | TypeInfo::I64
- | TypeInfo::U128
- | TypeInfo::I128
- | TypeInfo::F32
- | TypeInfo::F64 => cfg!(target_endian = "little"),
+ | TypeInfo::U16 |
+ TypeInfo::I16 |
+ TypeInfo::U32 |
+ TypeInfo::I32 |
+ TypeInfo::U64 |
+ TypeInfo::I64 |
+ TypeInfo::U128 |
+ TypeInfo::I128 |
+ TypeInfo::F32 |
+ TypeInfo::F64 => cfg!(target_endian = "little"),
TypeInfo::Unknown => false,
};
diff --git a/src/compact.rs b/src/compact.rs
index c74b432c..ffcd2ae4 100644
--- a/src/compact.rs
+++ b/src/compact.rs
@@ -364,9 +364,8 @@ impl<'a> Encode for CompactRef<'a, u32> {
match self.0 {
0..=0b0011_1111 => dest.push_byte((*self.0 as u8) << 2),
0..=0b0011_1111_1111_1111 => (((*self.0 as u16) << 2) | 0b01).encode_to(dest),
- 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => {
- ((*self.0 << 2) | 0b10).encode_to(dest)
- },
+ 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 =>
+ ((*self.0 << 2) | 0b10).encode_to(dest),
_ => {
dest.push_byte(0b11);
self.0.encode_to(dest);
@@ -401,9 +400,8 @@ impl<'a> Encode for CompactRef<'a, u64> {
match self.0 {
0..=0b0011_1111 => dest.push_byte((*self.0 as u8) << 2),
0..=0b0011_1111_1111_1111 => (((*self.0 as u16) << 2) | 0b01).encode_to(dest),
- 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => {
- (((*self.0 as u32) << 2) | 0b10).encode_to(dest)
- },
+ 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 =>
+ (((*self.0 as u32) << 2) | 0b10).encode_to(dest),
_ => {
let bytes_needed = 8 - self.0.leading_zeros() / 8;
assert!(
@@ -448,9 +446,8 @@ impl<'a> Encode for CompactRef<'a, u128> {
match self.0 {
0..=0b0011_1111 => dest.push_byte((*self.0 as u8) << 2),
0..=0b0011_1111_1111_1111 => (((*self.0 as u16) << 2) | 0b01).encode_to(dest),
- 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => {
- (((*self.0 as u32) << 2) | 0b10).encode_to(dest)
- },
+ 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 =>
+ (((*self.0 as u32) << 2) | 0b10).encode_to(dest),
_ => {
let bytes_needed = 16 - self.0.leading_zeros() / 8;
assert!(