neli-proc-macros-0.2.1/.cargo_vcs_info.json0000644000000001560000000000100142100ustar { "git": { "sha1": "715fb4a06c8063d58a1ef5a309ef9e75c3d8b55e" }, "path_in_vcs": "neli-proc-macros" }neli-proc-macros-0.2.1/.gitignore000064400000000000000000000000131046102023000147600ustar 00000000000000Cargo.lock neli-proc-macros-0.2.1/Cargo.lock0000644000000034220000000000100121620ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "neli-proc-macros" version = "0.2.1" dependencies = [ "either", "proc-macro2", "quote", "serde", "syn", ] [[package]] name = "proc-macro2" version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "serde" version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "syn" version = "2.0.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = 
"unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" neli-proc-macros-0.2.1/Cargo.toml0000644000000022110000000000100122000ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2024" name = "neli-proc-macros" version = "0.2.1" authors = ["John Baublitz "] build = false autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Procedural macros for neli" readme = false license = "BSD-3-Clause" repository = "https://github.com/jbaublitz/neli" [lib] name = "neli_proc_macros" path = "src/lib.rs" proc-macro = true [dependencies.either] version = "1.6" [dependencies.proc-macro2] version = "1" [dependencies.quote] version = "1" [dependencies.serde] version = "1" features = ["derive"] [dependencies.syn] version = "2.0" features = [ "full", "extra-traits", ] neli-proc-macros-0.2.1/Cargo.toml.orig000064400000000000000000000007141046102023000156670ustar 00000000000000[package] name = "neli-proc-macros" version = "0.2.1" authors = ["John Baublitz "] edition = "2024" description = "Procedural macros for neli" license = "BSD-3-Clause" repository = "https://github.com/jbaublitz/neli" [lib] proc-macro = true [dependencies] quote = "1" proc-macro2 = "1" either = "1.6" [dependencies.serde] version = "1" features = ["derive"] [dependencies.syn] version = "2.0" features = ["full", "extra-traits"] neli-proc-macros-0.2.1/LICENSE000064400000000000000000000027511046102023000140100ustar 
00000000000000BSD 3-Clause License Copyright (c) 2017, John Baublitz All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
neli-proc-macros-0.2.1/src/derive_frombytes.rs000064400000000000000000000137101046102023000175050ustar 00000000000000use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use syn::{Attribute, Fields, Ident, ItemStruct, Type, parse_str}; use crate::shared::{StructInfo, process_input, process_size, process_skip_debug}; fn process_attrs(field_type: Type, field_attrs: Vec) -> TokenStream2 { let input = process_input(&field_attrs); let skip_debug = process_skip_debug(&field_attrs); let size = process_size(&field_attrs) .unwrap_or_else(|| parse_str("input").expect("input is a valid expression")); match (input, skip_debug) { (Some(Some(input)), _) => quote! { { let input = #input; log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; let slice = buffer.get_ref().as_ref().get(position..position + #size); if let Some(buf) = slice { log::trace!( "Buffer to be deserialized: {buf:?}", ); } let ok = <#field_type as neli::FromBytesWithInput>::from_bytes_with_input( buffer, input, )?; log::trace!("Field deserialized: {:?}", ok); ok } }, (Some(None), _) => quote! { { log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; let slice = buffer.get_ref().as_ref().get(position..position + #size); if let Some(buf) = slice { log::trace!( "Buffer to be deserialized: {buf:?}", ); } let ok = <#field_type as neli::FromBytesWithInput>::from_bytes_with_input( buffer, input, )?; log::trace!("Field deserialized: {:?}", ok); ok } }, (None, true) => quote! { { log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; let ok = <#field_type as neli::FromBytes>::from_bytes(buffer)?; log::trace!("Field deserialized: {:?}", ok); ok } }, (None, false) => quote! 
{ { log::trace!( "Deserializing field type {}", std::any::type_name::<#field_type>(), ); let position = buffer.position() as usize; let slice = buffer.get_ref() .as_ref() .get(position..position + <#field_type as neli::TypeSize>::type_size()); if let Some(buf) = slice { log::trace!( "Buffer to be deserialized: {buf:?}", ); } let ok = <#field_type as neli::FromBytes>::from_bytes(buffer)?; log::trace!("Field deserialized: {:?}", ok); ok } }, } } pub fn impl_frombytes_struct( is: ItemStruct, trt: &str, method_name: &str, input_type: Option, input: Option, ) -> TokenStream2 { let is_named = matches!(is.fields, Fields::Named(_)); let info = StructInfo::from_item_struct(is, Some(trt), "from_bytes_bound", false); let trt = Ident::new(trt, Span::call_site()); let method_name = Ident::new(method_name, Span::call_site()); let ( struct_name, generics, generics_without_bounds, field_names, field_types, field_attrs, padded, ) = info.into_tuple(); if field_names.is_empty() { return quote! { impl #generics neli::#trt for #struct_name #generics_without_bounds { #input_type fn #method_name(buffer: &mut std::io::Cursor> #input) -> Result { Ok(#struct_name) } } }; } let struct_expr = if is_named { quote! { #struct_name { #( #field_names, )* } } } else { quote! { #struct_name( #( #field_names, )* ) } }; let from_bytes_exprs = field_types .into_iter() .zip(field_attrs) .map(|(field_type, field_attrs)| process_attrs(field_type, field_attrs)); let padding = if padded { quote! { <#struct_name #generics_without_bounds as neli::FromBytes>::strip(buffer)?; } } else { TokenStream2::new() }; quote! 
{ impl #generics neli::#trt for #struct_name #generics_without_bounds { #input_type fn #method_name(buffer: &mut std::io::Cursor> #input) -> Result { let pos = buffer.position(); let res = { let mut from_bytes_impl = || { log::trace!("Deserializing data type {}", stringify!(#struct_name)); #( let #field_names = #from_bytes_exprs; )* #padding Ok(#struct_expr) }; from_bytes_impl() }; match res { Ok(res) => Ok(res), Err(e) => { buffer.set_position(pos); Err(e) }, } } } } } neli-proc-macros-0.2.1/src/derive_header.rs000064400000000000000000000012651046102023000167250ustar 00000000000000use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::ItemStruct; use crate::shared::StructInfo; fn generate_header(mut i: StructInfo) -> TokenStream2 { i.pop_field(); let (struct_name, generics, generics_without_bounds, _, field_types, _, _) = i.into_tuple(); quote! { impl #generics neli::Header for #struct_name #generics_without_bounds { fn header_size() -> usize { #( <#field_types as neli::TypeSize>::type_size() )+* } } } } pub fn impl_header_struct(is: ItemStruct) -> TokenStream2 { let info = StructInfo::from_item_struct(is, None, "header_bound", false); generate_header(info) } neli-proc-macros-0.2.1/src/derive_size.rs000064400000000000000000000051341046102023000164460ustar 00000000000000use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::{FieldsNamed, FieldsUnnamed, Ident, ItemEnum, ItemStruct}; use crate::shared::{ FieldInfo, StructInfo, generate_arms, generate_named_fields, generate_unnamed_fields, process_impl_generics, }; fn generate_size(i: StructInfo) -> TokenStream2 { let (struct_name, generics, generics_without_bounds, field_names, field_types, _, _) = i.into_tuple(); if field_types.is_empty() { quote! { impl #generics neli::Size for #struct_name #generics_without_bounds { fn unpadded_size(&self) -> usize { 0 } } } } else { quote! 
{ impl #generics neli::Size for #struct_name #generics_without_bounds { fn unpadded_size(&self) -> usize { #( <#field_types as neli::Size>::unpadded_size(&self.#field_names) )+* } } } } } pub fn impl_size_struct(is: ItemStruct) -> TokenStream2 { let struct_info = StructInfo::from_item_struct(is, Some("Size"), "size_bound", true); generate_size(struct_info) } fn generate_named_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsNamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_named_fields(fields).into_iter()); quote! { #enum_name::#var_name { #(#field_names),* } => { #(<#types as neli::Size>::unpadded_size(&#field_names))+* }, } } fn generate_unnamed_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsUnnamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_unnamed_fields(fields, false).into_iter()); quote! { #enum_name::#var_name( #( #field_names ),* ) => { #( <#types as neli::Size>::unpadded_size(&#field_names) )+* } } } pub fn impl_size_enum(ie: ItemEnum) -> TokenStream2 { let (generics, generics_without_bounds) = process_impl_generics(ie.generics, Some("Size")); let enum_name = ie.ident; let arms = generate_arms( enum_name.clone(), ie.variants.into_iter().collect::>(), generate_named_pat_and_expr, generate_unnamed_pat_and_expr, quote! { 0 }, ); quote! 
{ impl #generics neli::Size for #enum_name #generics_without_bounds { fn unpadded_size(&self) -> usize { match self { #(#arms)* } } } } } neli-proc-macros-0.2.1/src/derive_tobytes.rs000064400000000000000000000061031046102023000171620ustar 00000000000000use proc_macro2::TokenStream as TokenStream2; use quote::quote; use syn::{FieldsNamed, FieldsUnnamed, Ident, ItemEnum, ItemStruct}; use crate::shared::{ FieldInfo, StructInfo, generate_arms, generate_named_fields, generate_unnamed_fields, process_impl_generics, process_trait_bounds, }; pub fn impl_tobytes_struct(is: ItemStruct) -> TokenStream2 { let info = StructInfo::from_item_struct(is, Some("ToBytes"), "to_bytes_bound", true); let (struct_name, generics, generics_without_bounds, field_names, field_types, _, padded) = info.into_tuple(); if field_names.is_empty() { return quote! { impl neli::ToBytes for #struct_name { fn to_bytes(&self, _: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { Ok(()) } } }; } let padding = if padded { quote! { <#struct_name #generics_without_bounds as neli::ToBytes>::pad(&self, buffer)?; } } else { TokenStream2::new() }; quote! { impl #generics neli::ToBytes for #struct_name #generics_without_bounds { fn to_bytes(&self, buffer: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { #( <#field_types as neli::ToBytes>::to_bytes(&self.#field_names, buffer)?; )* #padding Ok(()) } } } } fn generate_named_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsNamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_named_fields(fields).into_iter()); quote! { #enum_name::#var_name { #(#field_names),* } => { #(<#types as neli::ToBytes>::to_bytes(&#field_names, buffer)?; )* Ok(()) }, } } fn generate_unnamed_pat_and_expr( enum_name: Ident, var_name: Ident, fields: FieldsUnnamed, ) -> TokenStream2 { let (field_names, types, _) = FieldInfo::to_vecs(generate_unnamed_fields(fields, false).into_iter()); quote! 
{ #enum_name::#var_name( #( #field_names ),* ) => { #( <#types as neli::ToBytes>::to_bytes(#field_names, buffer)?; )* Ok(()) } } } pub fn impl_tobytes_enum(ie: ItemEnum) -> TokenStream2 { let (generics, generics_without_bounds) = process_impl_generics(ie.generics, Some("ToBytes")); let trait_bounds = process_trait_bounds(&ie.attrs, "to_bytes_bound"); let enum_name = ie.ident; let arms = generate_arms( enum_name.clone(), ie.variants.into_iter().collect::>(), generate_named_pat_and_expr, generate_unnamed_pat_and_expr, quote! { Ok(()) }, ); quote! { impl #generics neli::ToBytes for #enum_name #generics_without_bounds where #( #trait_bounds ),* { fn to_bytes(&self, buffer: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { match self { #(#arms)* } } } } } neli-proc-macros-0.2.1/src/lib.rs000064400000000000000000000171341046102023000147070ustar 00000000000000//! Procedural macros to be used with the library //! [`neli`](https://github.com/jbaublitz/neli). //! //! All derive macros other than `Header` generate implicit type //! parameter bounds on every type parameter which can be overriden //! with struct attributes. use proc_macro::TokenStream; use quote::quote; use syn::{Item, Meta, parse}; #[macro_use] mod shared; mod derive_frombytes; mod derive_header; mod derive_size; mod derive_tobytes; mod neli_enum; use derive_frombytes::*; use derive_header::*; use derive_size::*; use derive_tobytes::*; use neli_enum::*; /// Converts an enum from the form: /// /// ```no_compile /// use neli_proc_macros::neli_enum; /// /// #[neli_enum(serialized_type = "u16")] /// pub enum MyConstants { /// ConstOne = 1, /// ConstTwo = 2, /// ConstThree = 3, /// } /// ``` /// /// to: /// /// ``` /// pub enum MyConstants { /// ConstOne, /// ConstTwo, /// ConstThree, /// } /// ``` /// /// with [`From`] implemented reflexively for `MyConstants` and /// [`u16`]. 
#[proc_macro_attribute] pub fn neli_enum(attr: TokenStream, item: TokenStream) -> TokenStream { let attr_string = attr.to_string(); let meta = parse::(attr).unwrap_or_else(|_| panic!("{attr_string} is not a valid attribute")); let enum_item = parse::(item).unwrap(); let enm = if let Item::Enum(e) = enum_item { e } else { panic!("This macro only operates on enums"); }; TokenStream::from(generate_neli_enum(enm, meta)) } /// Derives the neli `Size` trait for a struct or enum. /// /// Acceptable struct attribute is: /// * `#[neli(size_bound = "T: MyTrait")]` which will generate a /// trait bound in the impl for the specified type parameter. /// /// Implicit type parameter bound: `Size`. #[proc_macro_derive(Size, attributes(neli))] pub fn proc_macro_size(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_size_struct(strct), Item::Enum(enm) => impl_size_enum(enm), _ => panic!("Size can only be derived for structs and enums"), }) } /// Derives the neli `Header` trait for a struct or enum. Unlike /// other derive macros in this crate, the `Header` derive macro /// does not impose type parameter bounds on type parameters. /// See the accepted attribute for more information. The reason for /// this is that the last field is considered to be the payload. /// Because the payload may be represented by a type parameter, /// we cannot blindly restrict type parameters or else we impose /// an artificial restriction of `TypeSize` on the payload type /// parameter. This is a problem for the `Header` trait as the /// payload may be unsized even if the rest of the header is /// composed exclusively of statically sized types and are therefore /// compatible with the `TypeSize` trait. /// /// Acceptable struct attribute is: /// * `#[neli(header_bound = "T: MyTrait")]` which will generate a /// trait bound in the impl for the specified type parameter. 
/// /// While there is no implicit type parameter bound, every type /// parameter that does not correspond to a payload should have /// a specified type parameter bound of `TypeSize`. #[proc_macro_derive(Header, attributes(neli))] pub fn proc_macro_header(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_header_struct(strct), _ => panic!("Header can only be derived for structs"), }) } /// Derives the neli `FromBytes` trait for a struct. /// /// Acceptable struct attribute is: /// * `#[neli(from_bytes_bound = "T: MyTrait")]` which will generate /// a trait bound in the impl for the specified type parameter. /// * `#[neli(padding)]` which will add special handling for padding /// for this struct. /// /// Acceptable field attribute forms are: /// * `#[neli(input = "input_expression")]` which may only be used /// once for a struct. The behavior of this attribute is that a /// bound requirement will change from the implicit `FromBytes` to /// an implicit `FromBytesWithInput` bound. The method in this trait /// will be called with `input_expression` as the input provided. /// * `#[neli(input)]` which will transparently pass the input /// provided in the `FromBytesWithInput` method through to the /// `FromBytesWithInput` method for this field unchanged according /// to the rules described above. /// * `#[neli(size = "size_var_name")]` which allows specifying a size of the data type /// that is different from the input specified by `#[neli(input)]`. Not specifying /// this attribute defaults to using `input` as the size as well. /// /// Implicit type parameter bound: `FromBytes`. 
#[proc_macro_derive(FromBytes, attributes(neli))] pub fn proc_macro_frombytes(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_frombytes_struct(strct, "FromBytes", "from_bytes", None, None), _ => panic!("FromBytes can only be derived for structs"), }) } /// Derives the neli `FromBytesWithInput` trait for a struct. /// /// Acceptable struct attribute is: /// * `#[neli(from_bytes_bound = "T: MyTrait")]` which will generate /// a trait bound in the impl for the specified type parameter. /// * `#[neli(padding)]` which will add special handling for padding /// for this struct. /// /// Acceptable field attribute forms are: /// * `#[neli(input = "input_expression")]` which may only be used /// once for a struct. The behavior of this attribute is that a /// bound requirement will change from the implicit `FromBytes` to /// an implicit `FromBytesWithInput` bound. The method in this trait /// will be called with `input_expression` as the input provided. /// * `#[neli(input)]` which will transparently pass the input /// provided in the `FromBytesWithInput` method through to the /// `FromBytesWithInput` method for this field unchanged according /// to the rules described above. /// * `#[neli(size = "size_var_name")]` which allows specifying a size of the data type /// that is different from the input specified by `#[neli(input)]`. Not specifying /// this attribute defaults to using `input` as the size as well. /// /// Implicit type parameter bound: `FromBytes`. #[proc_macro_derive(FromBytesWithInput, attributes(neli))] pub fn proc_macro_frombyteswithinput(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_frombytes_struct( strct, "FromBytesWithInput", "from_bytes_with_input", Some(quote! { type Input = usize; }), Some(quote! 
{ , input: Self::Input }), ), _ => panic!("FromBytesWithInput can only be derived for structs"), }) } /// Derives the neli `ToBytes` trait for a struct or enum. /// /// Acceptable struct attribute is: /// * `#[neli(to_bytes_bound = "T: MyTrait")]` which will generate a /// trait bound in the impl for the specified type parameter. /// * `#[neli(padding)]` which will add special handling for padding /// for this struct. /// /// Implicit type parameter bound: `ToBytes`. #[proc_macro_derive(ToBytes, attributes(neli))] pub fn proc_macro_tobytes(ts: TokenStream) -> TokenStream { let item = parse::(ts).unwrap(); TokenStream::from(match item { Item::Struct(strct) => impl_tobytes_struct(strct), Item::Enum(enm) => impl_tobytes_enum(enm), _ => panic!("ToBytes can only be derived for structs and enums"), }) } neli-proc-macros-0.2.1/src/neli_enum.rs000064400000000000000000000121611046102023000161070ustar 00000000000000use proc_macro::TokenStream; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use syn::{ Arm, Attribute, Expr, Ident, ItemEnum, Lit, Meta, Path, Token, Type, Variant, parse, parse_str, }; use crate::shared::remove_bad_attrs; fn parse_type_attr(attr: Meta) -> Type { if let Meta::NameValue(nv) = attr { if nv.path == parse_str::("serialized_type").unwrap() { if let Expr::Lit(el) = nv.value { if let Lit::Str(ls) = el.lit { return parse_str::(&ls.value()) .unwrap_or_else(|_| panic!("Invalid type supplied: {}", ls.value())); } } } } panic!("Attribute in the form #[neli(serialized_type = \"TYPE_LITERAL_STR\")] required") } fn parse_enum(enm: &mut ItemEnum, ty: &Type) -> Vec<(Vec, Ident, Expr)> { let exprs = enm .variants .iter_mut() .map(|var| { if let Some((_, expr)) = var.discriminant.take() { (var.attrs.clone(), var.ident.clone(), expr) } else { panic!("All variants in the provided enum require an expression assignment") } }) .collect(); if !enm.variants.trailing_punct() { enm.variants.push_punct(Token![,](Span::call_site())); } 
enm.variants.push_value( parse::(TokenStream::from(quote! { UnrecognizedConst(#ty) })) .expect("Could not parse tokens as a variant"), ); exprs } fn parse_from_info( enum_name: Ident, var_info: Vec<(Vec, Ident, Expr)>, ) -> (Vec, Vec) { let mut from_const_info = Vec::new(); let mut from_type_info = Vec::new(); for (mut attributes, ident, expr) in var_info { attributes = remove_bad_attrs(attributes); let mut from_const_arm = parse::(TokenStream::from(quote! { #( #attributes )* i if i == #expr => #enum_name::#ident, })) .expect("Failed to parse tokens as a match arm"); from_const_arm.attrs = attributes.clone(); from_const_info.push(from_const_arm); let mut from_type_arm = parse::(TokenStream::from(quote! { #( #attributes )* #enum_name::#ident => #expr, })) .expect("Failed to parse tokens as a match arm"); from_type_arm.attrs = attributes.clone(); from_type_info.push(from_type_arm); } (from_const_info, from_type_info) } pub fn generate_neli_enum(mut enm: ItemEnum, meta: Meta) -> TokenStream2 { let enum_name = enm.ident.clone(); let ty = parse_type_attr(meta); let variant_info = parse_enum(&mut enm, &ty); let (from_const_info, from_type_info) = parse_from_info(enum_name.clone(), variant_info); quote! { #[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[allow(missing_docs)] #enm impl #enum_name { /// Check whether a given method is an unrecognized /// constant for the set of possible constants /// associated with the current type. 
pub fn is_unrecognized(&self) -> bool { match *self { #enum_name::UnrecognizedConst(_) => true, _ => false, } } } impl neli::Size for #enum_name { fn unpadded_size(&self) -> usize { std::mem::size_of::<#ty>() } } impl neli::TypeSize for #enum_name { fn type_size() -> usize { std::mem::size_of::<#ty>() } } impl neli::ToBytes for #enum_name { fn to_bytes(&self, buffer: &mut std::io::Cursor>) -> Result<(), neli::err::SerError> { let bin_rep: #ty = self.into(); bin_rep.to_bytes(buffer) } } impl neli::FromBytes for #enum_name { fn from_bytes(buffer: &mut std::io::Cursor>) -> Result { Ok(#enum_name::from(<#ty as neli::FromBytes>::from_bytes( buffer )?)) } } impl From<#ty> for #enum_name { fn from(cnst: #ty) -> Self { match cnst { #( #from_const_info )* i => #enum_name::UnrecognizedConst(i), } } } impl From<#enum_name> for #ty { fn from(enm: #enum_name) -> Self { match enm { #( #from_type_info )* #enum_name::UnrecognizedConst(i) => i, } } } impl From<&#enum_name> for #ty { fn from(enm: &#enum_name) -> Self { match *enm { #( #from_type_info )* #enum_name::UnrecognizedConst(i) => i, } } } } } neli-proc-macros-0.2.1/src/shared.rs000064400000000000000000000402541046102023000154060ustar 00000000000000use std::{any::type_name, collections::HashMap}; use proc_macro2::{Span, TokenStream as TokenStream2, TokenTree}; use quote::{ToTokens, quote}; use syn::{ Attribute, Expr, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics, Ident, Index, ItemStruct, LitStr, Meta, MetaNameValue, Path, PathArguments, PathSegment, Token, TraitBound, TraitBoundModifier, Type, TypeParam, TypeParamBound, Variant, parse::Parse, parse_str, punctuated::Punctuated, token::{PathSep, Plus}, }; /// Represents a field as either an identifier or an index. 
pub enum FieldRepr { Index(Index), Ident(Ident), } impl ToTokens for FieldRepr { fn to_tokens(&self, tokens: &mut TokenStream2) { match self { FieldRepr::Index(i) => i.to_tokens(tokens), FieldRepr::Ident(i) => i.to_tokens(tokens), } } } /// Represents the field name, type, and all attributes associated /// with this field. pub struct FieldInfo { field_name: FieldRepr, field_type: Type, field_attrs: Vec, } impl FieldInfo { /// Convert field info to a tuple. fn into_tuple(self) -> (FieldRepr, Type, Vec) { (self.field_name, self.field_type, self.field_attrs) } /// Convert a vector of [`FieldInfo`]s to a tuple of vectors /// each containing name, type, or attributes. pub fn to_vecs(v: I) -> (Vec, Vec, Vec>) where I: Iterator, { v.into_iter().fold( (Vec::new(), Vec::new(), Vec::new()), |(mut names, mut types, mut attrs), info| { let (name, ty, attr) = info.into_tuple(); names.push(name); types.push(ty); attrs.push(attr); (names, types, attrs) }, ) } } /// Necessary information for a given struct to generate trait /// implementations. pub struct StructInfo { struct_name: Ident, generics: Generics, generics_without_bounds: Generics, field_info: Vec, padded: bool, } type StructInfoTuple = ( Ident, Generics, Generics, Vec, Vec, Vec>, bool, ); impl StructInfo { /// Extract the necessary information from an /// [`ItemStruct`][syn::ItemStruct] data structure. 
pub fn from_item_struct( i: ItemStruct, trait_name: Option<&str>, trait_bound_path: &str, uses_self: bool, ) -> Self { let (mut generics, generics_without_bounds) = process_impl_generics(i.generics, trait_name); let trait_bounds = process_trait_bounds(&i.attrs, trait_bound_path); override_trait_bounds_on_generics(&mut generics, &trait_bounds); let field_info = match i.fields { Fields::Named(fields_named) => generate_named_fields(fields_named), Fields::Unnamed(fields_unnamed) => generate_unnamed_fields(fields_unnamed, uses_self), Fields::Unit => Vec::new(), }; let padded = process_padding(&i.attrs); StructInfo { struct_name: i.ident, generics, generics_without_bounds, field_info, padded, } } /// Remove the last field from the record. pub fn pop_field(&mut self) { let _ = self.field_info.pop(); } /// Convert all necessary struct information into a tuple of /// values. pub fn into_tuple(mut self) -> StructInfoTuple { let (field_names, field_types, field_attrs) = self.field_info(); ( self.struct_name, self.generics, self.generics_without_bounds, field_names, field_types, field_attrs, self.padded, ) } /// Convert all field information into a tuple. fn field_info(&mut self) -> (Vec, Vec, Vec>) { FieldInfo::to_vecs(self.field_info.drain(..)) } } /// Convert a list of identifiers into a path where the path segments /// are added in the order that they appear in the list. fn path_from_idents(idents: &[&str]) -> Path { Path { leading_colon: None, segments: idents .iter() .map(|ident| PathSegment { ident: Ident::new(ident, Span::call_site()), arguments: PathArguments::None, }) .collect::>(), } } /// Process all type parameters in the type parameter definition for /// an `impl` block. Optionally add a trait bound for all type parameters /// if `required_trait` is `Some(_)`. /// /// The first return value in the tuple is the list of type parameters /// with trait bounds added. 
The second argument is a list of type /// parameters without trait bounds to be passed into the type parameter /// list for a struct. /// /// # Example: /// ## impl block /// /// ```no_compile /// trait MyTrait {} /// /// impl MyStruct { /// fn nothing() {} /// } /// ``` /// /// ## Method call /// `neli_proc_macros::process_impl_generics(generics, Some("MyTrait"))` /// /// ## Result /// ```no_compile /// (, ) /// ``` /// /// or rather: /// /// ```no_compile /// impl MyStruct { /// fn nothing() {} /// } /// ``` pub fn process_impl_generics( mut generics: Generics, required_trait: Option<&str>, ) -> (Generics, Generics) { if let Some(rt) = required_trait { for generic in generics.params.iter_mut() { if let GenericParam::Type(param) = generic { param.colon_token = Some(Token![:](Span::call_site())); param.bounds.push(TypeParamBound::Trait(TraitBound { paren_token: None, modifier: TraitBoundModifier::None, lifetimes: None, path: path_from_idents(&["neli", rt]), })); param.eq_token = None; param.default = None; } } } let mut generics_without_bounds: Generics = generics.clone(); for generic in generics_without_bounds.params.iter_mut() { if let GenericParam::Type(param) = generic { param.colon_token = None; param.bounds.clear(); param.eq_token = None; param.default = None; } } (generics, generics_without_bounds) } /// Remove attributes that should not be carried over to an `impl` /// definition and only belong in the data structure like documentation /// attributes. pub fn remove_bad_attrs(attrs: Vec) -> Vec { attrs .into_iter() .filter(|attr| match &attr.meta { Meta::NameValue(MetaNameValue { path, .. }) => !path.is_ident("doc"), _ => true, }) .collect() } /// Generate a pattern and associated expression for each variant /// in an enum. 
fn generate_pat_and_expr<N, U>(
    enum_name: Ident,
    var_name: Ident,
    fields: Fields,
    generate_named_pat_and_expr: &N,
    generate_unnamed_pat_and_expr: &U,
    unit: &TokenStream2,
) -> TokenStream2
where
    N: Fn(Ident, Ident, FieldsNamed) -> TokenStream2,
    U: Fn(Ident, Ident, FieldsUnnamed) -> TokenStream2,
{
    match fields {
        Fields::Named(fields) => generate_named_pat_and_expr(enum_name, var_name, fields),
        Fields::Unnamed(fields) => generate_unnamed_pat_and_expr(enum_name, var_name, fields),
        // Unit variants take no bindings; the caller supplies the
        // expression for the arm body.
        Fields::Unit => quote! {
            #enum_name::#var_name => #unit,
        },
    }
}

/// Convert an enum variant into an arm of a match statement.
fn generate_arm<N, U>(
    attrs: Vec<Attribute>,
    enum_name: Ident,
    var_name: Ident,
    fields: Fields,
    generate_named_pat_and_expr: &N,
    generate_unnamed_pat_and_expr: &U,
    unit: &TokenStream2,
) -> TokenStream2
where
    N: Fn(Ident, Ident, FieldsNamed) -> TokenStream2,
    U: Fn(Ident, Ident, FieldsUnnamed) -> TokenStream2,
{
    let attrs = remove_bad_attrs(attrs)
        .into_iter()
        .map(|attr| attr.meta)
        .collect::<Vec<Meta>>();
    let arm = generate_pat_and_expr(
        enum_name,
        var_name,
        fields,
        generate_named_pat_and_expr,
        generate_unnamed_pat_and_expr,
        unit,
    );
    // Re-wrap each carried-over `Meta` in `#[...]`; interpolating the bare
    // `Meta` tokens would generate invalid Rust in front of the match arm.
    quote! {
        #(
            #[#attrs]
        )*
        #arm
    }
}

/// Generate all arms of a match statement.
pub fn generate_arms<N, U>(
    enum_name: Ident,
    variants: Vec<Variant>,
    generate_named_pat_and_expr: N,
    generate_unnamed_pat_and_expr: U,
    unit: TokenStream2,
) -> Vec<TokenStream2>
where
    N: Fn(Ident, Ident, FieldsNamed) -> TokenStream2,
    U: Fn(Ident, Ident, FieldsUnnamed) -> TokenStream2,
{
    variants
        .into_iter()
        .map(|var| {
            let variant_name = var.ident;
            generate_arm(
                var.attrs,
                enum_name.clone(),
                variant_name,
                var.fields,
                &generate_named_pat_and_expr,
                &generate_unnamed_pat_and_expr,
                &unit,
            )
        })
        .collect()
}

/// Generate a list of named fields in accordance with the struct.
pub fn generate_named_fields(fields: FieldsNamed) -> Vec<FieldInfo> {
    fields
        .named
        .into_iter()
        .map(|field| FieldInfo {
            field_name: FieldRepr::Ident(field.ident.expect("Must be named")),
            field_type: field.ty,
            field_attrs: field.attrs,
        })
        .collect()
}

/// Generate unnamed fields as either indicies to be accessed using
/// `self` or placeholder variable names for match-style patterns.
pub fn generate_unnamed_fields(fields: FieldsUnnamed, uses_self: bool) -> Vec<FieldInfo> {
    fields
        .unnamed
        .into_iter()
        .enumerate()
        .map(|(index, field)| FieldInfo {
            field_name: if uses_self {
                // Tuple-struct access through `self`: `self.0`, `self.1`, ...
                FieldRepr::Index(Index {
                    index: index as u32,
                    span: Span::call_site(),
                })
            } else {
                // Pattern-binding placeholders: `a`, `b`, `c`, ...
                // NOTE(review): indexes past 25 would overflow into
                // non-identifier characters; assumed never hit in practice.
                FieldRepr::Ident(Ident::new(
                    &String::from((b'a' + index as u8) as char),
                    Span::call_site(),
                ))
            },
            field_type: field.ty,
            field_attrs: field.attrs,
        })
        .collect()
}

/// Returns [`true`] if the given attribute is present in the list.
fn attr_present(attrs: &[Attribute], attr_name: &str) -> bool {
    // Only `#[neli(...)]` attributes are inspected; the flag is matched
    // against any bare identifier in the attribute's token list.
    for attr in attrs {
        if let Meta::List(list) = &attr.meta {
            if list.path.is_ident("neli") {
                for token in list.tokens.clone() {
                    if let TokenTree::Ident(ident) = token {
                        if ident == attr_name {
                            return true;
                        }
                    }
                }
            }
        }
    }
    false
}

/// Process attributes to find all attributes with the name `attr_name`.
/// Return a [`Vec`] of [`Option`] types with the associated literal parsed
/// into type parameter `T`. `T` must allow parsing from a string to be
/// used with this method.
fn process_attr(attrs: &[Attribute], attr_name: &str) -> Vec> where T: Parse, { let mut output = Vec::new(); for attr in attrs { if attr.path().is_ident("neli") { attr.parse_nested_meta(|meta| { let literal_str = match meta.value() { Ok(value) => match value.parse::() { Ok(v) => Some(v.value()), Err(_) => panic!("Cannot have a bare ="), }, Err(_) => None, }; if meta.path.is_ident(attr_name) { match literal_str { Some(l) => { output.push(Some(parse_str::(&l).unwrap_or_else(|_| { panic!("{} should be valid tokens of type {}", l, type_name::()) }))); } None => { output.push(None); } } } Ok(()) }) .unwrap_or_else(|e| { panic!( "{}", format!("Should be able to parse all nested attributes: {e}") ) }); } } output } pub fn process_trait_bounds(attrs: &[Attribute], trait_bound_path: &str) -> Vec { process_attr(attrs, trait_bound_path) .into_iter() .flatten() .collect() } /// Handles the attribute `#[neli(padding)]`. pub fn process_padding(attrs: &[Attribute]) -> bool { attr_present(attrs, "padding") } /// Handles the attribute `#[neli(input)]` or `#[neli(input = "...")]` /// when deriving [`FromBytes`][neli::FromBytes] implementations. /// /// Returns: /// * [`None`] if the attribute is not present /// * [`Some(None)`] if the attribute is present and has no /// associated expression /// * [`Some(Some(_))`] if the attribute is present and /// has an associated expression pub fn process_input(attrs: &[Attribute]) -> Option> { let mut exprs = process_attr(attrs, "input"); if exprs.len() > 1 { panic!("Only one instance of the attribute allowed for attribute #[neli(input = \"...\")]"); } else { exprs.pop() } } /// Handles the attribute `#[neli(skip_debug)]` /// when deriving [`FromBytes`][neli::FromBytes] implementations. /// This removes the restriction for the field to have [`TypeSize`][neli::TypeSize] /// implemented by skipping buffer trace logging for this field. 
///
/// Returns:
/// * [`false`] if the attribute is not present
/// * [`true`] if the attribute is present
pub fn process_skip_debug(attrs: &[Attribute]) -> bool {
    // The turbofish only fixes the parse target; any parsed expression is
    // rejected because the flag takes no value.
    let exprs = process_attr::<Expr>(attrs, "skip_debug");
    if exprs.is_empty() {
        false
    } else if exprs.iter().any(|expr| expr.is_some()) {
        panic!("No input expressions allowed for #[neli(skip_debug)]")
    } else {
        true
    }
}

/// Handles the attribute `#[neli(size = "...")]`
/// when deriving [`FromBytes`][neli::FromBytes] implementations.
///
/// Returns:
/// * [`None`] if the attribute is not present
/// * [`Some(_)`] if the attribute is present and has an associated expression
pub fn process_size(attrs: &[Attribute]) -> Option<Expr> {
    let mut exprs = process_attr(attrs, "size");
    if exprs.len() > 1 {
        panic!("Only one input expression allowed for attribute #[neli(size = \"...\")]");
    } else {
        // A bare `#[neli(size)]` with no expression is an error.
        exprs
            .pop()
            .map(|opt| opt.expect("#[neli(size = \"...\")] must have associated expression"))
    }
}

/// Allow overriding the trait bounds specified by the method
/// [`process_impl_generics`][process_impl_generics].
/// /// # Example /// ```no_compile /// use std::marker::PhantomData; /// /// struct MyStruct(PhantomData, PhantomData); /// /// trait MyTrait {} /// trait AnotherTrait {} /// /// // Input /// /// impl MyStruct { /// fn nothing() {} /// } /// /// // Result /// /// impl MyStruct { /// fn nothing() {} /// } /// ``` fn override_trait_bounds_on_generics(generics: &mut Generics, trait_bound_overrides: &[TypeParam]) { let mut overrides = trait_bound_overrides.iter().cloned().fold( HashMap::>::new(), |mut map, param| { if let Some(bounds) = map.get_mut(¶m.ident) { bounds.extend(param.bounds); } else { map.insert(param.ident, param.bounds); } map }, ); for generic in generics.params.iter_mut() { if let GenericParam::Type(ty) = generic { let ident = &ty.ident; if let Some(ors) = overrides.remove(ident) { ty.colon_token = Some(Token![:](Span::call_site())); ty.bounds = ors; ty.eq_token = None; ty.default = None; } } } }