diff --git a/Cargo.toml b/Cargo.toml
index 4d8a3286..6e5c57fe 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,7 +20,5 @@ proc-macro2 = "1.0.32"
 quote = "1.0.10"
 syn = "1.0.82"
 smallvec = "1.8.0"
-
-[dev-dependencies]
 serde = { version = "1.0.130", features = ["derive"] }
 serde_json = "1.0.72"
diff --git a/build.rs b/build.rs
new file mode 100644
index 00000000..05b2fa29
--- /dev/null
+++ b/build.rs
@@ -0,0 +1,2 @@
+// Dummy build.rs to ensure the OUT_DIR environment variable is set for tests and examples.
+fn main() {}
diff --git a/examples/features.rs b/examples/features.rs
new file mode 100644
index 00000000..d93703de
--- /dev/null
+++ b/examples/features.rs
@@ -0,0 +1,97 @@
+// TODO: make this into a test
+use serde::{Deserialize, Serialize};
+use superstruct::superstruct;
+
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+enum ForkName {
+    Bellatrix,
+    Capella,
+    Deneb,
+    Electra,
+}
+
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+enum FeatureName {
+    Merge,
+    Withdrawals,
+    Blobs,
+    EIP6110,
+    Verge,
+    EIP7549,
+}
+
+#[superstruct(variants_and_features_decl = "FORK_ORDER")]
+const FORK_ORDER: &[(ForkName, &[FeatureName])] = &[
+    (ForkName::Bellatrix, &[FeatureName::Merge]),
+    (ForkName::Capella, &[FeatureName::Withdrawals]),
+    (
+        ForkName::Electra,
+        &[FeatureName::EIP6110, FeatureName::Verge],
+    ),
+];
+
+#[superstruct(feature_dependencies_decl = "FEATURE_DEPENDENCIES")]
+const FEATURE_DEPENDENCIES: &[(FeatureName, &[FeatureName])] = &[
+    (FeatureName::Withdrawals, &[FeatureName::Merge]),
+    (FeatureName::Blobs, &[FeatureName::Withdrawals]),
+    (FeatureName::EIP6110, &[FeatureName::Merge]),
+    (FeatureName::Verge, &[FeatureName::Merge]),
+];
+
+#[superstruct(
+    variants_and_features_from = "FORK_ORDER",
+    feature_dependencies = "FEATURE_DEPENDENCIES",
+    variant_type(name = "ForkName", getter = "fork_name"),
+    feature_type(
+        name = "FeatureName",
+        list = "feature_names",
+        check = "check_feature_enabled"
+    )
+)]
+struct Block {
+    historical_updates: String,
+    #[superstruct(feature(Withdrawals))]
+    historical_summaries: String,
+    #[superstruct(feature(Withdrawals))] // enabled in the Withdrawals fork and all subsequent forks
+    withdrawals: Vec<u64>,
+    #[superstruct(feature(Blobs))] // if Blobs is never enabled, this field is disabled entirely
+    blobs: Vec<u64>,
+    #[superstruct(feature(EIP6110))]
+    deposits: Vec<u64>,
+}
+
+#[superstruct(
+    feature(Withdrawals),
+    variants_and_features_from = "FORK_ORDER",
+    feature_dependencies = "FEATURE_DEPENDENCIES",
+    variant_type(name = "ForkName", getter = "fork_name"),
+    feature_type(
+        name = "FeatureName",
+        list = "feature_names",
+        check = "check_feature_enabled"
+    )
+)]
+struct Payload {
+    transactions: Vec<u64>,
+}
+
+fn main() {
+    let block = Block::Electra(BlockElectra {
+        historical_updates: "hey".into(),
+        historical_summaries: "thing".into(),
+        withdrawals: vec![1, 2, 3],
+        deposits: vec![0, 0, 0, 0, 0, 0],
+    });
+
+    assert_eq!(block.fork_name(), ForkName::Electra);
+    assert_eq!(
+        block.feature_names(),
+        vec![
+            FeatureName::Merge,
+            FeatureName::Withdrawals,
+            FeatureName::EIP6110,
+            FeatureName::Verge
+        ]
+    );
+    assert!(block.check_feature_enabled(FeatureName::EIP6110));
+}
diff --git a/src/feature_expr.rs b/src/feature_expr.rs
new file mode 100644
index 00000000..5f8bb919
--- /dev/null
+++ b/src/feature_expr.rs
@@ -0,0 +1,75 @@
+use darling::{Error, FromMeta};
+use syn::{Ident, Meta, NestedMeta};
+
+/// A cfg-like expression in terms of features, which can be evaluated for each fork at each field
+/// to determine whether that field is turned on.
+#[derive(Debug)]
+pub enum FeatureExpr {
+    And(Box<FeatureExpr>, Box<FeatureExpr>),
+    Or(Box<FeatureExpr>, Box<FeatureExpr>),
+    Not(Box<FeatureExpr>),
+    Literal(Ident),
+}
+
+fn parse(meta: NestedMeta) -> Result<FeatureExpr, Error> {
+    match meta {
+        // TODO: assert 1 segment
+        NestedMeta::Meta(Meta::Path(path)) => Ok(FeatureExpr::Literal(
+            path.segments.last().unwrap().ident.clone(),
+        )),
+        NestedMeta::Meta(Meta::List(meta_list)) => {
+            let segments = &meta_list.path.segments;
+            assert_eq!(segments.len(), 1);
+            let operator = &segments.last().unwrap().ident;
+            match operator.to_string().as_str() {
+                "and" => {
+                    let mut nested = meta_list.nested;
+                    assert_eq!(nested.len(), 2, "`and` should have exactly 2 operands");
+                    let right_meta = nested.pop().unwrap().into_value();
+                    let left_meta = nested.pop().unwrap().into_value();
+                    Ok(FeatureExpr::And(
+                        Box::new(parse(left_meta)?),
+                        Box::new(parse(right_meta)?),
+                    ))
+                }
+                "or" => {
+                    let mut nested = meta_list.nested;
+                    assert_eq!(nested.len(), 2, "`or` should have exactly 2 operands");
+                    let right_meta = nested.pop().unwrap().into_value();
+                    let left_meta = nested.pop().unwrap().into_value();
+                    Ok(FeatureExpr::Or(
+                        Box::new(parse(left_meta)?),
+                        Box::new(parse(right_meta)?),
+                    ))
+                }
+                "not" => {
+                    let mut nested = meta_list.nested;
+                    assert_eq!(nested.len(), 1, "`not` should have exactly 1 operand");
+                    let inner_meta = nested.pop().unwrap().into_value();
+                    Ok(FeatureExpr::Not(Box::new(parse(inner_meta)?)))
+                }
+                op => panic!("unsupported operator: {op}"),
+            }
+        }
+        _ => panic!("unexpected feature expr: {meta:?}"),
+    }
+}
+
+impl FromMeta for FeatureExpr {
+    fn from_list(items: &[NestedMeta]) -> Result<Self, Error> {
+        assert_eq!(items.len(), 1, "feature expr should have 1 part");
+        let expr_meta = items.first().cloned().unwrap();
+        parse(expr_meta)
+    }
+}
+
+impl FeatureExpr {
+    pub fn eval(&self, features: &[Ident]) -> bool {
+        match self {
+            Self::Literal(feature_name) => features.contains(feature_name),
+            Self::And(left, right) => left.eval(features) && right.eval(features),
+            Self::Or(left, right) => left.eval(features) || right.eval(features),
+            Self::Not(inner) => !inner.eval(features),
+        }
+    }
+}
diff --git a/src/feature_getters.rs b/src/feature_getters.rs
new file mode 100644
index 00000000..e51e0744
--- /dev/null
+++ b/src/feature_getters.rs
@@ -0,0 +1,113 @@
+use crate::{FeatureTypeOpts, VariantTypeOpts};
+use proc_macro2::{Span, TokenStream};
+use quote::quote;
+use std::collections::HashMap;
+use syn::Ident;
+
+const DEFAULT_VARIANT_TYPE_GETTER: &str = "variant_type";
+const DEFAULT_FEATURE_TYPE_LIST: &str = "list_all_features";
+const DEFAULT_FEATURE_TYPE_CHECK: &str = "is_feature_enabled";
+
+pub fn get_feature_getters(
+    type_name: &Ident,
+    variant_names: &[Ident],
+    all_variant_features_opts: Option<HashMap<Ident, Vec<Ident>>>,
+    variant_type_opts: &Option<VariantTypeOpts>,
+    feature_type_opts: &Option<FeatureTypeOpts>,
+) -> Vec<TokenStream> {
+    let Some(variant_type) = variant_type_opts else {
+        return vec![];
+    };
+    let Some(feature_type) = feature_type_opts else {
+        return vec![];
+    };
+    let Some(all_variant_features) = all_variant_features_opts else {
+        return vec![];
+    };
+
+    let mut output = vec![];
+
+    output.extend(get_variant_type_getters(
+        type_name,
+        variant_names,
+        variant_type,
+    ));
+    output.extend(get_feature_type_getters(
+        type_name,
+        variant_names,
+        all_variant_features,
+        feature_type,
+    ));
+    output
+}
+
+pub fn get_variant_type_getters(
+    type_name: &Ident,
+    variant_names: &[Ident],
+    variant_type: &VariantTypeOpts,
+) -> Vec<TokenStream> {
+    let variant_type_name = &variant_type.name;
+    let getter_name = variant_type
+        .getter
+        .clone()
+        .unwrap_or_else(|| Ident::new(DEFAULT_VARIANT_TYPE_GETTER, Span::call_site()));
+    let getter = quote! {
+        pub fn #getter_name(&self) -> #variant_type_name {
+            match self {
+                #(
+                    #type_name::#variant_names(..) => #variant_type_name::#variant_names,
+                )*
+            }
+        }
+    };
+    vec![getter.into()]
+}
+
+pub fn get_feature_type_getters(
+    type_name: &Ident,
+    variant_names: &[Ident],
+    all_variant_features: HashMap<Ident, Vec<Ident>>,
+    feature_type: &FeatureTypeOpts,
+) -> Vec<TokenStream> {
+    let feature_type_name = &feature_type.name;
+    let list_features = feature_type
+        .list
+        .clone()
+        .unwrap_or_else(|| Ident::new(DEFAULT_FEATURE_TYPE_LIST, Span::call_site()));
+
+    let mut feature_sets: Vec<Vec<Ident>> = vec![];
+
+    for variant in variant_names {
+        // Skip variants that have no feature set defined for this type.
+        if let Some(feature_set) = all_variant_features.get(variant) {
+            feature_sets.push(feature_set.clone());
+        } else {
+            continue;
+        }
+    }
+
+    let feature_list = quote! {
+        pub fn #list_features(&self) -> &'static [#feature_type_name] {
+            match self {
+                #(
+                    #type_name::#variant_names(..) => &[#(#feature_type_name::#feature_sets),*],
+                )*
+            }
+        }
+    };
+
+    let check_feature = feature_type
+        .check
+        .clone()
+        .unwrap_or_else(|| Ident::new(DEFAULT_FEATURE_TYPE_CHECK, Span::call_site()));
+    let feature_check = quote! {
+        pub fn #check_feature(&self, feature: #feature_type_name) -> bool {
+            match self {
+                #(
+                    #type_name::#variant_names(..) => self.#list_features().contains(&feature),
+                )*
+            }
+        }
+    };
+    vec![feature_list.into(), feature_check.into()]
+}
diff --git a/src/lib.rs b/src/lib.rs
index d6b675a7..eaf4ebed 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,5 +1,6 @@
 use attributes::{IdentList, NestedMetaList};
 use darling::FromMeta;
+use feature_expr::FeatureExpr;
 use from::{
     generate_from_enum_trait_impl_for_ref, generate_from_variant_trait_impl,
     generate_from_variant_trait_impl_for_ref,
@@ -10,13 +11,17 @@ use proc_macro::TokenStream;
 use proc_macro2::{Span, TokenStream as TokenStream2};
 use quote::{format_ident, quote, ToTokens};
 use std::collections::HashMap;
+use std::fs::File;
 use std::iter::{self, FromIterator};
+use std::path::PathBuf;
 use syn::{
-    parse_macro_input, Attribute, AttributeArgs, Expr, Field, GenericParam, Ident, ItemStruct,
-    Lifetime, LifetimeDef, Type, TypeGenerics, TypeParamBound,
+    parse_macro_input, Attribute, AttributeArgs, Expr, Field, GenericParam, Ident, ItemConst,
+    ItemStruct, Lifetime, LifetimeDef, Type, TypeGenerics, TypeParamBound,
 };
 
 mod attributes;
+mod feature_expr;
+mod feature_getters;
 mod from;
 mod macros;
 mod naming;
@@ -26,7 +31,8 @@ mod utils;
 #[derive(Debug, FromMeta)]
 struct StructOpts {
     /// List of variant names of the superstruct being derived.
-    variants: IdentList,
+    #[darling(default)]
+    variants: Option<IdentList>,
     /// List of attributes to apply to the variant structs.
     #[darling(default)]
     variant_attributes: Option<NestedMetaList>,
@@ -60,6 +66,29 @@ struct StructOpts {
     /// List of other superstruct types to generate mappings into from RefMut.
     #[darling(default)]
     map_ref_mut_into: Option<IdentList>,
+
+    /*
+     * FEATURE EXPERIMENT
+     */
+    #[darling(default)]
+    variants_and_features_from: Option<String>,
+    #[darling(default)]
+    feature_dependencies: Option<String>,
+    #[darling(default)]
+    variant_type: Option<VariantTypeOpts>,
+    #[darling(default)]
+    feature_type: Option<FeatureTypeOpts>,
+    #[darling(default)]
+    feature: Option<IdentList>,
+
+    // variant_type(name = "ForkName", getter = "fork_name")
+    // feature_type(name = "FeatureName", list = "list_all_features", check = "is_feature_enabled")
+
+    // Separate invocations
+    #[darling(default)]
+    variants_and_features_decl: Option<String>,
+    #[darling(default)]
+    feature_dependencies_decl: Option<String>,
 }
 
 /// Field-level configuration.
@@ -71,6 +100,12 @@ struct FieldOpts {
     getter: Option<GetterOpts>,
     #[darling(default)]
     partial_getter: Option<GetterOpts>,
+
+    /*
+     * FEATURE EXPERIMENT
+     */
+    #[darling(default)]
+    feature: Option<FeatureExpr>,
 }
 
 /// Getter configuration for a specific field
@@ -92,6 +127,22 @@ struct ErrorOpts {
     expr: Option<String>,
 }
 
+#[derive(Debug, FromMeta)]
+struct VariantTypeOpts {
+    name: Ident,
+    #[darling(default)]
+    getter: Option<Ident>,
+}
+
+#[derive(Debug, FromMeta)]
+struct FeatureTypeOpts {
+    name: Ident,
+    #[darling(default)]
+    list: Option<Ident>,
+    #[darling(default)]
+    check: Option<Ident>,
+}
+
 impl ErrorOpts {
     fn parse(&self) -> Option<(Type, Expr)> {
         let err_ty_str = self.ty.as_ref()?;
@@ -122,19 +173,195 @@ struct FieldData {
     name: Ident,
     field: Field,
     only: Option<Vec<Ident>>,
+    feature: Option<FeatureExpr>,
+    /// Variants for which this field is enabled.
+    variants: Vec<Ident>,
     getter_opts: GetterOpts,
     partial_getter_opts: GetterOpts,
 }
 
 impl FieldData {
     fn is_common(&self) -> bool {
-        self.only.is_none()
+        self.only.is_none() && self.feature.is_none()
+    }
+}
+
+/// Return the list of variants and a mapping from each variant to its full list of enabled features.
+fn get_variant_and_feature_names(
+    opts: &StructOpts,
+) -> (Vec<Ident>, Option<HashMap<Ident, Vec<Ident>>>) {
+    // Fixed list of variants.
+    if let Some(variants) = &opts.variants {
+        assert!(
+            opts.variants_and_features_from.is_none(),
+            "cannot have variants and variants_and_features_from"
+        );
+        return (variants.idents.clone(), None);
+    }
+
+    // Dynamic list of variants and features.
+    let Some(variants_and_features_from) = &opts.variants_and_features_from else {
+        panic!("either variants or variants_and_features_from must be set");
+    };
+    let Some(feature_dependencies) = &opts.feature_dependencies else {
+        panic!("variants_and_features_from requires feature_dependencies");
+    };
+
+    if opts.variant_type.is_none() || opts.feature_type.is_none() {
+        panic!("variant_type and feature_type must be defined");
+    }
+
+    let starting_feature: Option<Ident> = opts
+        .feature
+        .as_ref()
+        .map(|f| {
+            assert!(f.idents.len() == 1, "feature must be singular");
+            f.idents.first()
+        })
+        .flatten()
+        .cloned();
+
+    let target_dir = get_cargo_target_dir().expect("your crate needs a build.rs");
+
+    let variants_path = target_dir.join(format!("{variants_and_features_from}.json"));
+    let features_path = target_dir.join(format!("{feature_dependencies}.json"));
+
+    let variants_file = File::open(&variants_path).expect("variants_and_features file exists");
+    let features_file = File::open(&features_path).expect("feature_dependencies file exists");
+
+    let mut variants_and_features: Vec<(String, Vec<String>)> =
+        serde_json::from_reader(variants_file).unwrap();
+    let feature_dependencies: Vec<(String, Vec<String>)> =
+        serde_json::from_reader(features_file).unwrap();
+
+    // Sanity check the dependency graph.
+    // Create the cumulative list of features enabled at each variant.
+    let mut variant_features_cumulative: HashMap<String, Vec<String>> = HashMap::new();
+    for (i, (variant, features)) in variants_and_features.iter().enumerate() {
+        let variant_features = variant_features_cumulative
+            .entry(variant.clone())
+            .or_default();
+
+        for (_, prior_features) in variants_and_features.iter().take(i) {
+            variant_features.extend_from_slice(prior_features);
+        }
+        variant_features.extend_from_slice(features);
     }
+
+    // Check the dependency graph.
+    for (feature, dependencies) in feature_dependencies {
+        for (variant, _) in &variants_and_features {
+            let cumulative_features = variant_features_cumulative.get(variant).unwrap();
+            if cumulative_features.contains(&feature) {
+                // The feature is enabled for this variant, so all of its dependencies must be enabled too.
+                for dependency in &dependencies {
+                    if !cumulative_features.contains(&dependency) {
+                        panic!("feature {feature} depends on {dependency} but it is not enabled for variant {variant}")
+                    }
+                }
+            }
+        }
+    }
+
+    // In some instances, we might want to restrict which variants are generated for a type.
+    // In this case, a `starting_feature` is defined and we only include variants starting from
+    // the first variant that introduces that feature.
+    let starting_index = if let Some(feature) = starting_feature {
+        variants_and_features
+            .iter()
+            .position(|(_, deps)| deps.iter().any(|f| *f == feature.to_string()))
+            .expect("variants_and_features does not contain the required feature")
+    } else {
+        0
+    };
+    variants_and_features = variants_and_features[starting_index..].to_vec();
+
+    let variants = variants_and_features
+        .iter()
+        .map(|(variant, _)| Ident::new(variant, Span::call_site()))
+        .collect();
+
+    let variant_features_cumulative_idents = variant_features_cumulative
+        .into_iter()
+        .map(|(variant, features)| {
+            (
+                Ident::new(&variant, Span::call_site()),
+                features
+                    .into_iter()
+                    .map(|feature| Ident::new(&feature, Span::call_site()))
+                    .collect(),
+            )
+        })
+        .collect();
+
+    (variants, Some(variant_features_cumulative_idents))
 }
 
 #[proc_macro_attribute]
 pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
     let attr_args = parse_macro_input!(args as AttributeArgs);
+    let opts = StructOpts::from_list(&attr_args).unwrap();
+
+    // Early return for "helper" invocations.
+    if opts.variants_and_features_decl.is_some() || opts.feature_dependencies_decl.is_some() {
+        let decl_name = opts
+            .variants_and_features_decl
+            .or(opts.feature_dependencies_decl)
+            .unwrap();
+        let input2 = input.clone();
+        let item = parse_macro_input!(input2 as ItemConst);
+
+        let Expr::Reference(ref_expr) = *item.expr else {
+            panic!("ref bad");
+        };
+        let Expr::Array(array_expr) = *ref_expr.expr else {
+            panic!("bad");
+        };
+
+        fn path_to_string(e: &Expr) -> String {
+            let Expr::Path(path) = e else {
+                panic!("path bad");
+            };
+            let last_segment_str = path.path.segments.iter().last().unwrap().ident.to_string();
+            last_segment_str
+        }
+
+        let data: Vec<(String, Vec<String>)> = array_expr
+            .elems
+            .iter()
+            .map(|expr| {
+                let Expr::Tuple(tuple_expr) = expr else {
+                    panic!("bad2");
+                };
+                let tuple_parts = tuple_expr.elems.iter().cloned().collect::<Vec<_>>();
+                assert_eq!(tuple_parts.len(), 2);
+
+                let variant_name = path_to_string(&tuple_parts[0]);
+
+                let Expr::Reference(feature_ref_expr) = tuple_parts[1].clone() else {
+                    panic!("ref bad");
+                };
+                let Expr::Array(feature_array_expr) = *feature_ref_expr.expr else {
+                    panic!("bad");
+                };
+                let feature_names = feature_array_expr
+                    .elems
+                    .iter()
+                    .map(|expr| path_to_string(expr))
+                    .collect::<Vec<_>>();
+
+                (variant_name, feature_names)
+            })
+            .collect::<Vec<_>>();
+
+        let target_dir = get_cargo_target_dir().expect("your crate needs a build.rs");
+        let output_path = target_dir.join(format!("{decl_name}.json"));
+        let output_file = File::create(output_path).expect("create output file");
+        serde_json::to_writer(output_file, &data).expect("write output file");
+
+        return input;
+    }
+
     let item = parse_macro_input!(input as ItemStruct);
 
     let type_name = &item.ident;
@@ -144,13 +371,11 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
     // Generics used for the impl block.
     let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl();
 
-    let opts = StructOpts::from_list(&attr_args).unwrap();
-
     let mut output_items: Vec<TokenStream> = vec![];
 
     let mk_struct_name = |variant_name: &Ident| format_ident!("{}{}", type_name, variant_name);
 
-    let variant_names = &opts.variants.idents;
+    let (variant_names, all_variant_features) = get_variant_and_feature_names(&opts);
     let struct_names = variant_names.iter().map(mk_struct_name).collect_vec();
 
     // Vec of field data.
@@ -176,26 +401,47 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
         output_field.attrs = discard_superstruct_attrs(&output_field.attrs);
 
         // Add the field to the `variant_fields` map for all applicable variants.
-        let field_variants = field_opts.only.as_ref().map_or_else(
-            || variant_names.clone(),
-            |only| only.keys().cloned().collect_vec(),
-        );
+        let field_variants = if let Some(only_variants) = field_opts.only.as_ref() {
+            only_variants.keys().cloned().collect_vec()
+        } else if let Some(feature_expr) = field_opts.feature.as_ref() {
+            let all_variant_features = all_variant_features
+                .as_ref()
+                .expect("all_variant_features is set");
+            // Check whether the field is enabled for each variant.
+            variant_names
+                .iter()
+                .filter(|variant| {
+                    let variant_features = all_variant_features
+                        .get(*variant)
+                        .expect("variant should be in all_variant_features");
+                    feature_expr.eval(variant_features)
+                })
+                .cloned()
+                .collect()
+        } else {
+            // Enable for all variants.
+            variant_names.clone()
+        };
 
-        for variant_name in field_variants {
+        for variant_name in &field_variants {
             variant_fields
-                .get_mut(&variant_name)
-                .expect("invalid variant name in `only`")
+                .get_mut(variant_name)
+                .expect("invalid variant name in `only` or `feature` expression")
                 .push(output_field.clone());
         }
 
         // Check field opts
-        if field_opts.only.is_some() && field_opts.getter.is_some() {
-            panic!("can't configure `only` and `getter` on the same field");
-        } else if field_opts.only.is_none() && field_opts.partial_getter.is_some() {
+        let common = field_opts.only.is_none() && field_opts.feature.is_none();
+
+        if !common && field_opts.getter.is_some() {
+            panic!("can't configure `getter` on non-common field");
+        } else if common && field_opts.partial_getter.is_some() {
             panic!("can't set `partial_getter` options on common field");
         }
+        // TODO: check that `feature` and `only` are mutually exclusive
 
         let only = field_opts.only.map(|only| only.keys().cloned().collect());
+        let feature = field_opts.feature;
         let getter_opts = field_opts.getter.unwrap_or_default();
         let partial_getter_opts = field_opts.partial_getter.unwrap_or_default();
 
@@ -204,6 +450,8 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
             name,
             field: output_field,
             only,
+            feature,
+            variants: field_variants,
             getter_opts,
             partial_getter_opts,
         });
@@ -355,7 +603,7 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
         .filter(|f| !f.is_common())
         .cartesian_product(&[false, true])
         .flat_map(|(field_data, mutability)| {
-            let field_variants = field_data.only.as_ref()?;
+            let field_variants = &field_data.variants;
             Some(make_partial_getter(
                 type_name,
                 &field_data,
@@ -387,6 +635,14 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
         })
         .collect_vec();
 
+    let feature_getters = feature_getters::get_feature_getters(
+        type_name,
+        &variant_names,
+        all_variant_features,
+        &opts.variant_type,
+        &opts.feature_type,
+    );
+
     let impl_block = quote! {
         impl #impl_generics #type_name #ty_generics #where_clause {
             pub fn to_ref<#ref_ty_lifetime>(&#ref_ty_lifetime self) -> #ref_ty_name #ref_ty_generics {
@@ -417,6 +673,9 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
             #(
                 #partial_getters
             )*
+            #(
+                #feature_getters
+            )*
         }
     };
     output_items.push(impl_block.into());
@@ -435,7 +694,7 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
         .iter()
         .filter(|f| !f.is_common())
         .flat_map(|field_data| {
-            let field_variants = field_data.only.as_ref()?;
+            let field_variants = &field_data.variants;
             Some(make_partial_getter(
                 &ref_ty_name,
                 &field_data,
@@ -482,7 +741,7 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
         .iter()
         .filter(|f| !f.is_common() && !f.partial_getter_opts.no_mut)
         .flat_map(|field_data| {
-            let field_variants = field_data.only.as_ref()?;
+            let field_variants = &field_data.variants;
             Some(make_partial_getter(
                 &ref_mut_ty_name,
                 &field_data,
@@ -515,7 +774,7 @@ pub fn superstruct(args: TokenStream, input: TokenStream) -> TokenStream {
         &ref_mut_ty_name,
         num_generics,
         &struct_names,
-        variant_names,
+        &variant_names,
         &opts,
         &mut output_items,
     );
@@ -792,3 +1051,14 @@ fn is_attr_with_ident(attr: &Attribute, ident: &str) -> bool {
         .get_ident()
         .map_or(false, |attr_ident| attr_ident.to_string() == ident)
 }
+
+fn get_cargo_target_dir() -> Result<PathBuf, Box<dyn std::error::Error>> {
+    let mut target_dir = PathBuf::from(&std::env::var("OUT_DIR")?);
+    // Pop 3 times to ensure that the files are generated in $CARGO_TARGET_DIR/$PROFILE.
+    // This workaround is required since the above env vars are not available at the time of the
+    // macro execution.
+    target_dir.pop();
+    target_dir.pop();
+    target_dir.pop();
+    Ok(target_dir)
+}