
Commit

fmt
pkhry committed Nov 5, 2024
1 parent a25db72 commit 9112b92
Showing 9 changed files with 66 additions and 180 deletions.
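The hunks below apply one formatting convention across the crate: binary operators such as ||, && and | move from the start of a continuation line to the end of the preceding line, and single-expression match arms lose their surrounding braces. A minimal before/after sketch of the operator change (the is_lint_before/is_lint_after functions are illustrative only, not taken from this commit):

    // Style before this commit: operators lead each continuation line.
    fn is_lint_before(name: &str) -> bool {
        name == "allow"
            || name == "deny"
            || name == "warn"
    }

    // Style after this commit: operators trail the previous line instead.
    fn is_lint_after(name: &str) -> bool {
        name == "allow" ||
            name == "deny" ||
            name == "warn"
    }

    fn main() {
        // Both forms compile to the same code; only the layout differs.
        assert!(is_lint_before("deny"));
        assert!(is_lint_after("warn"));
    }
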
15 changes: 7 additions & 8 deletions derive/src/decode.rs
@@ -100,9 +100,8 @@ pub fn quote(
                 }
             }
         },
-        Data::Union(_) => {
-            Error::new(Span::call_site(), "Union types are not supported.").to_compile_error()
-        },
+        Data::Union(_) =>
+            Error::new(Span::call_site(), "Union types are not supported.").to_compile_error(),
     }
 }
 
@@ -121,8 +120,8 @@ pub fn quote_decode_into(
     let fields = match data {
         Data::Struct(syn::DataStruct {
             fields:
-                Fields::Named(syn::FieldsNamed { named: fields, .. })
-                | Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }),
+                Fields::Named(syn::FieldsNamed { named: fields, .. }) |
+                Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }),
             ..
         }) => fields,
         _ => return None,
@@ -134,9 +133,9 @@ pub fn quote_decode_into(
 
     // Bail if there are any extra attributes which could influence how the type is decoded.
     if fields.iter().any(|field| {
-        utils::get_encoded_as_type(field).is_some()
-            || utils::is_compact(field)
-            || utils::should_skip(&field.attrs)
+        utils::get_encoded_as_type(field).is_some() ||
+            utils::is_compact(field) ||
+            utils::should_skip(&field.attrs)
     }) {
         return None;
     }

12 changes: 5 additions & 7 deletions derive/src/lib.rs
@@ -367,19 +367,17 @@ pub fn compact_as_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStr
                 let constructor = quote!( #name(#( #recurse, )*));
                 (&field.ty, quote!(&self.#id), constructor)
             },
-            _ => {
+            _ =>
                 return Error::new(
                     data.fields.span(),
                     "Only structs with a single non-skipped field can derive CompactAs",
                 )
                 .to_compile_error()
-                .into()
-            },
-        },
-        Data::Enum(syn::DataEnum { enum_token: syn::token::Enum { span }, .. })
-        | Data::Union(syn::DataUnion { union_token: syn::token::Union { span }, .. }) => {
-            return Error::new(span, "Only structs can derive CompactAs").to_compile_error().into()
+                .into(),
         },
+        Data::Enum(syn::DataEnum { enum_token: syn::token::Enum { span }, .. }) |
+        Data::Union(syn::DataUnion { union_token: syn::token::Union { span }, .. }) =>
+            return Error::new(span, "Only structs can derive CompactAs").to_compile_error().into(),
     };
 
     let impl_block = quote! {

10 changes: 4 additions & 6 deletions derive/src/max_encoded_len.rs
@@ -66,12 +66,10 @@ pub fn derive_max_encoded_len(input: proc_macro::TokenStream) -> proc_macro::Tok
 /// generate an expression to sum up the max encoded length from several fields
 fn fields_length_expr(fields: &Fields, crate_path: &syn::Path) -> proc_macro2::TokenStream {
     let fields_iter: Box<dyn Iterator<Item = &Field>> = match fields {
-        Fields::Named(ref fields) => {
-            Box::new(fields.named.iter().filter(|field| !should_skip(&field.attrs)))
-        },
-        Fields::Unnamed(ref fields) => {
-            Box::new(fields.unnamed.iter().filter(|field| !should_skip(&field.attrs)))
-        },
+        Fields::Named(ref fields) =>
+            Box::new(fields.named.iter().filter(|field| !should_skip(&field.attrs))),
+        Fields::Unnamed(ref fields) =>
+            Box::new(fields.unnamed.iter().filter(|field| !should_skip(&field.attrs))),
         Fields::Unit => Box::new(std::iter::empty()),
     };
     // expands to an expression like

30 changes: 13 additions & 17 deletions derive/src/trait_bounds.rs
@@ -121,9 +121,8 @@ pub fn add<N>(
             generics.make_where_clause().predicates.extend(bounds);
             return Ok(());
         },
-        Some(CustomTraitBound::SkipTypeParams { type_names, .. }) => {
-            type_names.into_iter().collect::<Vec<_>>()
-        },
+        Some(CustomTraitBound::SkipTypeParams { type_names, .. }) =>
+            type_names.into_iter().collect::<Vec<_>>(),
         None => Vec::new(),
     };
 
@@ -190,9 +189,9 @@ fn get_types_to_add_trait_bound(
         Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect())
     } else {
         let needs_codec_bound = |f: &syn::Field| {
-            !utils::is_compact(f)
-                && utils::get_encoded_as_type(f).is_none()
-                && !utils::should_skip(&f.attrs)
+            !utils::is_compact(f) &&
+                utils::get_encoded_as_type(f).is_none() &&
+                !utils::should_skip(&f.attrs)
         };
         let res = collect_types(data, needs_codec_bound)?
             .into_iter()
@@ -223,10 +222,9 @@ fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Resu
 
     let types = match *data {
         Data::Struct(ref data) => match &data.fields {
-            | Fields::Named(FieldsNamed { named: fields, .. })
-            | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => {
-                fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect()
-            },
+            | Fields::Named(FieldsNamed { named: fields, .. }) |
+            Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
+                fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(),
 
             Fields::Unit => Vec::new(),
         },
@@ -236,18 +234,16 @@ fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Resu
             .iter()
             .filter(|variant| !utils::should_skip(&variant.attrs))
             .flat_map(|variant| match &variant.fields {
-                | Fields::Named(FieldsNamed { named: fields, .. })
-                | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => {
-                    fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect()
-                },
+                | Fields::Named(FieldsNamed { named: fields, .. }) |
+                Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
+                    fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(),
 
                 Fields::Unit => Vec::new(),
             })
             .collect(),
 
-        Data::Union(ref data) => {
-            return Err(Error::new(data.union_token.span(), "Union types are not supported."))
-        },
+        Data::Union(ref data) =>
+            return Err(Error::new(data.union_token.span(), "Union types are not supported.")),
     };
 
     Ok(types)

42 changes: 19 additions & 23 deletions derive/src/utils.rs
@@ -346,17 +346,16 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> {
 
     match input.data {
         Data::Struct(ref data) => match &data.fields {
-            | Fields::Named(FieldsNamed { named: fields, .. })
-            | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => {
+            | Fields::Named(FieldsNamed { named: fields, .. }) |
+            Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
                 for field in fields {
                     for attr in &field.attrs {
                         check_field_attribute(attr)?;
                     }
-                }
-            },
+                },
             Fields::Unit => (),
         },
-        Data::Enum(ref data) => {
+        Data::Enum(ref data) =>
            for variant in data.variants.iter() {
                for attr in &variant.attrs {
                    check_variant_attribute(attr)?;
@@ -366,19 +365,18 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> {
                        check_field_attribute(attr)?;
                    }
                }
-            }
-        },
+            },
         Data::Union(_) => (),
     }
     Ok(())
 }
 
 // Check if the attribute is `#[allow(..)]`, `#[deny(..)]`, `#[forbid(..)]` or `#[warn(..)]`.
 pub fn is_lint_attribute(attr: &Attribute) -> bool {
-    attr.path().is_ident("allow")
-        || attr.path().is_ident("deny")
-        || attr.path().is_ident("forbid")
-        || attr.path().is_ident("warn")
+    attr.path().is_ident("allow") ||
+        attr.path().is_ident("deny") ||
+        attr.path().is_ident("forbid") ||
+        attr.path().is_ident("warn")
 }
 
 // Ensure a field is decorated only with the following attributes:
@@ -403,11 +401,10 @@ fn check_field_attribute(attr: &Attribute) -> syn::Result<()> {
                 path,
                 value: Expr::Lit(ExprLit { lit: Lit::Str(lit_str), .. }),
                 ..
-            }) if path.get_ident().map_or(false, |i| i == "encoded_as") => {
+            }) if path.get_ident().map_or(false, |i| i == "encoded_as") =>
                 TokenStream::from_str(&lit_str.value())
                     .map(|_| ())
-                    .map_err(|_e| syn::Error::new(lit_str.span(), "Invalid token stream"))
-            },
+                    .map_err(|_e| syn::Error::new(lit_str.span(), "Invalid token stream")),
 
             elt => Err(syn::Error::new(elt.span(), field_error)),
         }
@@ -454,21 +451,20 @@ fn check_top_attribute(attr: &Attribute) -> syn::Result<()> {
         `#[codec(decode_bound(T: Decode))]`, \
         `#[codec(decode_bound_with_mem_tracking_bound(T: DecodeWithMemTracking))]` or \
         `#[codec(mel_bound(T: MaxEncodedLen))]` are accepted as top attribute";
-    if attr.path().is_ident("codec")
-        && attr.parse_args::<CustomTraitBound<encode_bound>>().is_err()
-        && attr.parse_args::<CustomTraitBound<decode_bound>>().is_err()
-        && attr.parse_args::<CustomTraitBound<decode_with_mem_tracking_bound>>().is_err()
-        && attr.parse_args::<CustomTraitBound<mel_bound>>().is_err()
-        && codec_crate_path_inner(attr).is_none()
+    if attr.path().is_ident("codec") &&
+        attr.parse_args::<CustomTraitBound<encode_bound>>().is_err() &&
+        attr.parse_args::<CustomTraitBound<decode_bound>>().is_err() &&
+        attr.parse_args::<CustomTraitBound<decode_with_mem_tracking_bound>>().is_err() &&
+        attr.parse_args::<CustomTraitBound<mel_bound>>().is_err() &&
+        codec_crate_path_inner(attr).is_none()
     {
         let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
         if nested.len() != 1 {
             return Err(syn::Error::new(attr.meta.span(), top_error));
         }
         match nested.first().expect("Just checked that there is one item; qed") {
-            Meta::Path(path) if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") => {
-                Ok(())
-            },
+            Meta::Path(path) if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") =>
+                Ok(()),
 
             elt => Err(syn::Error::new(elt.span(), top_error)),
         }

5 changes: 2 additions & 3 deletions fuzzer/src/main.rs
@@ -269,13 +269,12 @@ fn fuzz_encode<T: Encode + Decode + Clone + PartialEq + std::fmt::Debug>(data: T
     let mut obj: &[u8] = &data.encode();
     let decoded = <T>::decode(&mut obj);
     match decoded {
-        Ok(object) => {
+        Ok(object) =>
             if object != original {
                 println!("original object: {:?}", original);
                 println!("decoded object: {:?}", object);
                 panic!("Original object differs from decoded object")
-            }
-        },
+            },
         Err(e) => {
             println!("original object: {:?}", original);
             println!("decoding error: {:?}", e);

97 changes: 0 additions & 97 deletions git.pach

This file was deleted.

20 changes: 10 additions & 10 deletions src/codec.rs
@@ -918,16 +918,16 @@ impl<T: Decode, const N: usize> Decode for [T; N] {
     ) -> Result<DecodeFinished, Error> {
         let is_primitive = match <T as Decode>::TYPE_INFO {
             | TypeInfo::U8 | TypeInfo::I8 => true,
-            | TypeInfo::U16
-            | TypeInfo::I16
-            | TypeInfo::U32
-            | TypeInfo::I32
-            | TypeInfo::U64
-            | TypeInfo::I64
-            | TypeInfo::U128
-            | TypeInfo::I128
-            | TypeInfo::F32
-            | TypeInfo::F64 => cfg!(target_endian = "little"),
+            | TypeInfo::U16 |
+            TypeInfo::I16 |
+            TypeInfo::U32 |
+            TypeInfo::I32 |
+            TypeInfo::U64 |
+            TypeInfo::I64 |
+            TypeInfo::U128 |
+            TypeInfo::I128 |
+            TypeInfo::F32 |
+            TypeInfo::F64 => cfg!(target_endian = "little"),
             TypeInfo::Unknown => false,
         };
 

