diff --git a/.gitignore b/.gitignore index f6a0084..8989e47 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,6 @@ # will have compiled files and executables debug/ target/ -artifacts/ # These are backup files generated by rustfmt **/*.rs.bk @@ -26,3 +25,7 @@ artifacts/ # Exported by proto-build /proto + +# Local settings +artifacts/ +.envrc diff --git a/packages/proto/src/lib.rs b/packages/proto/src/lib.rs index 423f9de..d515b3d 100644 --- a/packages/proto/src/lib.rs +++ b/packages/proto/src/lib.rs @@ -8,6 +8,7 @@ #[cfg(feature = "abstract-any")] pub mod any; +#[allow(clippy::all)] mod gen; pub use gen::*; diff --git a/proto-build/src/commands/apply_patches.rs b/proto-build/src/commands/apply_patches.rs new file mode 100644 index 0000000..59a5130 --- /dev/null +++ b/proto-build/src/commands/apply_patches.rs @@ -0,0 +1,49 @@ +use crate::utils::patch_file::patch_file; +use glob::glob; +use std::path::{Path, PathBuf}; + +/// Fix clashing type names in prost-generated code. +fn apply_cosmos_staking_patches(out_dir: &Path) -> crate::Result<()> { + const REPLACEMENTS: &[(&str, &str)] = &[ + ("enum Validators", "enum Policy"), + ( + "stake_authorization::Validators", + "stake_authorization::Policy", + ), + ]; + + patch_file(&out_dir.join("cosmos.staking.v1beta1.rs"), REPLACEMENTS)?; + + Ok(()) +} + +pub fn apply_patches(out_dir: &Path) -> crate::Result<()> { + println!("Applying patches..."); + /// Regex substitutions to apply to the prost-generated output + const REPLACEMENTS: &[(&str, &str)] = &[ + // Feature-gate gRPC impls which use `tonic::transport` + ( + "impl(.+)tonic::transport(.+)", + "#[cfg(feature = \"grpc-transport\")]\n \ + #[cfg_attr(docsrs, doc(cfg(feature = \"grpc-transport\")))]\n \ + impl${1}tonic::transport${2}", + ), + // Feature-gate the ProtoBuf descriptors + ( + "pub const FILE_DESCRIPTOR_SET", + "#[cfg(feature = \"proto-descriptor\")]\n \ + #[cfg_attr(docsrs, doc(cfg(feature = \"proto-descriptor\")))]\n \ + pub const FILE_DESCRIPTOR_SET", + ), 
+ ]; + + let src_files_glob = out_dir.join("*.rs"); + let src_files: Vec<PathBuf> = glob(src_files_glob.to_str().unwrap())?.flatten().collect(); + for src in src_files { + patch_file(&src, REPLACEMENTS)?; + } + + apply_cosmos_staking_patches(out_dir)?; + + Ok(()) +} diff --git a/proto-build/src/commands/cleanup.rs b/proto-build/src/commands/cleanup.rs new file mode 100644 index 0000000..1b61c00 --- /dev/null +++ b/proto-build/src/commands/cleanup.rs @@ -0,0 +1,17 @@ +use glob::glob; +use std::fs; +use std::path::Path; + +const EXCLUDED_PROTO_PACKAGES: &[&str] = &["amino", "gogoproto", "google", "tendermint"]; + +pub fn cleanup(out_dir: &Path) { + println!("Cleaning up..."); + for &pkg in EXCLUDED_PROTO_PACKAGES { + let excluded_files_glob = format!("{}/{pkg}*.rs", out_dir.display()); + glob(excluded_files_glob.as_str()) + .unwrap() + .flatten() + .try_for_each(fs::remove_file) + .unwrap(); + } +} diff --git a/proto-build/src/commands/export.rs b/proto-build/src/commands/export.rs new file mode 100644 index 0000000..549ce2c --- /dev/null +++ b/proto-build/src/commands/export.rs @@ -0,0 +1,15 @@ +use crate::consts::{ARCHWAY_DIR, COSMOS_SDK_DIR, IBC_DIR, WASMD_DIR}; +use crate::utils::run::run_buf_export; +use std::fs; +use std::path::Path; + +pub fn export(submodules_dir: &Path, proto_dir: &Path) { + if proto_dir.exists() { + fs::remove_dir_all(proto_dir).unwrap(); + } + + run_buf_export(submodules_dir, ARCHWAY_DIR, proto_dir).unwrap(); + run_buf_export(submodules_dir, COSMOS_SDK_DIR, proto_dir).unwrap(); + run_buf_export(submodules_dir, IBC_DIR, proto_dir).unwrap(); + run_buf_export(submodules_dir, WASMD_DIR, proto_dir).unwrap(); +} diff --git a/proto-build/src/commands/generate.rs b/proto-build/src/commands/generate.rs new file mode 100644 index 0000000..a3f904f --- /dev/null +++ b/proto-build/src/commands/generate.rs @@ -0,0 +1,24 @@ +use crate::utils::run::run_cmd; +use std::fs; +use std::path::Path; + +pub fn generate(buf_gen_path: &Path, proto_path: &Path, out_dir:
&Path) -> crate::Result<String> { + println!("Generating proto..."); + + if out_dir.exists() { + fs::remove_dir_all(out_dir).unwrap(); + } + + run_cmd( + "buf", + [ + "generate", + "--template", + &buf_gen_path.display().to_string(), + "--include-imports", + "-o", + &out_dir.display().to_string(), + &proto_path.display().to_string(), + ], + ) +} diff --git a/proto-build/src/commands/mod.rs b/proto-build/src/commands/mod.rs new file mode 100644 index 0000000..60c575a --- /dev/null +++ b/proto-build/src/commands/mod.rs @@ -0,0 +1,7 @@ +pub mod apply_patches; +pub mod cleanup; +pub mod export; +pub mod generate; +pub mod output_version; +pub mod rustfmt; +pub mod update_submodules; diff --git a/proto-build/src/commands/output_version.rs b/proto-build/src/commands/output_version.rs new file mode 100644 index 0000000..fb5bf9f --- /dev/null +++ b/proto-build/src/commands/output_version.rs @@ -0,0 +1,12 @@ +use crate::consts::{ARCHWAY_REV, COSMOS_SDK_REV, IBC_REV, WASMD_REV}; +use std::fs; +use std::path::Path; + +pub fn output_versions(out_dir: &Path) { + println!("Writing versions..."); + let out_dir = Path::new(out_dir); + fs::write(out_dir.join("ARCHWAY_COMMIT"), ARCHWAY_REV).unwrap(); + fs::write(out_dir.join("COSMOS_SDK_COMMIT"), COSMOS_SDK_REV).unwrap(); + fs::write(out_dir.join("IBC_COMMIT"), IBC_REV).unwrap(); + fs::write(out_dir.join("WASMD_COMMIT"), WASMD_REV).unwrap(); +} diff --git a/proto-build/src/commands/rustfmt.rs b/proto-build/src/commands/rustfmt.rs new file mode 100644 index 0000000..cfeeb8a --- /dev/null +++ b/proto-build/src/commands/rustfmt.rs @@ -0,0 +1,22 @@ +use crate::utils::run::run_cmd; +use glob::glob; +use std::path::{Path, PathBuf}; + +fn collect_files(dir: &Path, pattern: &str) -> crate::Result<Vec<PathBuf>> { + // dir.join("**").join(pattern); + let file_glob = format!("{}/**/{pattern}", dir.display()); + let paths: Vec<PathBuf> = glob(file_glob.as_str())?.flatten().collect(); + Ok(paths) +} + +pub fn rustfmt(out_dir: &Path) -> crate::Result<String> { + println!("Running
rustfmt..."); + let files = collect_files(out_dir, "*.rs")?.into_iter().map(Into::into); + let args: Vec<PathBuf> = ["--edition", "2021"] + .iter() + .map(Into::into) + .chain(files) + .collect(); + + run_cmd("rustfmt", args) +} diff --git a/proto-build/src/commands/update_submodules.rs b/proto-build/src/commands/update_submodules.rs new file mode 100644 index 0000000..6e6c99a --- /dev/null +++ b/proto-build/src/commands/update_submodules.rs @@ -0,0 +1,48 @@ +use crate::consts::{ + ARCHWAY_DIR, ARCHWAY_REV, COSMOS_SDK_DIR, COSMOS_SDK_REV, IBC_DIR, IBC_REV, WASMD_DIR, + WASMD_REV, +}; +use crate::utils::run::run_git; +use std::path::Path; + +pub fn update_submodules(submodules_dir: &Path) { + run_git(["submodule", "update", "--init"]).unwrap(); + run_git(["submodule", "foreach", "git", "fetch"]).unwrap(); + + println!("Updating archway-network/archway submodule..."); + let archway_dir = submodules_dir.join(ARCHWAY_DIR); + run_git([ + "-C", + archway_dir.to_str().unwrap(), + "reset", + "--hard", + ARCHWAY_REV, + ]) + .unwrap(); + + println!("Updating cosmos/cosmos-sdk submodule..."); + let sdk_dir = submodules_dir.join(COSMOS_SDK_DIR); + run_git([ + "-C", + sdk_dir.to_str().unwrap(), + "reset", + "--hard", + COSMOS_SDK_REV, + ]) + .unwrap(); + + println!("Updating cosmos/ibc-go submodule..."); + let ibc_dir = submodules_dir.join(IBC_DIR); + run_git(["-C", ibc_dir.to_str().unwrap(), "reset", "--hard", IBC_REV]).unwrap(); + + println!("Updating wasmd submodule..."); + let wasmd_dir = submodules_dir.join(WASMD_DIR); + run_git([ + "-C", + wasmd_dir.to_str().unwrap(), + "reset", + "--hard", + WASMD_REV, + ]) + .unwrap(); +} diff --git a/proto-build/src/consts.rs b/proto-build/src/consts.rs new file mode 100644 index 0000000..ddd00c4 --- /dev/null +++ b/proto-build/src/consts.rs @@ -0,0 +1,18 @@ +/// The Archway commit or tag to be cloned and used to build the proto files +pub const ARCHWAY_REV: &str = "v7.0.1"; +pub const ARCHWAY_DIR: &str = "archway"; + +/// The Cosmos SDK commit
or tag to be cloned and used to build the proto files +pub const COSMOS_SDK_REV: &str = "v0.47.11"; +pub const COSMOS_SDK_DIR: &str = "cosmos-sdk"; + +/// The Cosmos ibc-go commit or tag to be cloned and used to build the proto files +pub const IBC_REV: &str = "v7.4.0"; +pub const IBC_DIR: &str = "ibc-go"; + +/// The wasmd commit or tag to be cloned and used to build the proto files +pub const WASMD_REV: &str = "v0.45.0"; +pub const WASMD_DIR: &str = "wasmd"; + +pub const PROTO_DIR: &str = "proto"; +pub const OUT_DIR: &str = "packages/proto/src/gen"; diff --git a/proto-build/src/main.rs b/proto-build/src/main.rs index 118d52f..930fe64 100644 --- a/proto-build/src/main.rs +++ b/proto-build/src/main.rs @@ -1,37 +1,22 @@ +mod commands; +mod consts; mod parser; - -use std::{ - ffi::OsStr, - fs, io, - path::{Path, PathBuf}, - process, -}; - +mod utils; + +use std::path::PathBuf; +use std::{io, path::Path}; + +use crate::commands::apply_patches::apply_patches; +use crate::commands::cleanup::cleanup; +use crate::commands::export::export; +use crate::commands::generate::generate; +use crate::commands::output_version::output_versions; +use crate::commands::rustfmt::rustfmt; +use crate::commands::update_submodules::update_submodules; +use crate::consts::{OUT_DIR, PROTO_DIR}; use crate::parser::generate_advanced_struct; +use crate::utils::run::run_cargo; use error_chain::error_chain; -use glob::glob; -use regex::Regex; - -/// The Archway commit or tag to be cloned and used to build the proto files -const ARCHWAY_REV: &str = "v7.0.1"; -const ARCHWAY_DIR: &str = "archway"; - -/// The Cosmos SDK commit or tag to be cloned and used to build the proto files -const COSMOS_SDK_REV: &str = "v0.47.11"; -const COSMOS_SDK_DIR: &str = "cosmos-sdk"; - -/// The Cosmos ibc-go commit or tag to be cloned and used to build the proto files -const IBC_REV: &str = "v7.4.0"; -const IBC_DIR: &str = "ibc-go"; - -/// The wasmd commit or tag to be cloned and used to build the proto files -const 
WASMD_REV: &str = "v0.45.0"; -const WASMD_DIR: &str = "wasmd"; - -const PROTO_DIR: &str = "proto"; -const OUT_DIR: &str = "packages/proto/src/gen"; - -const EXCLUDED_PROTO_PACKAGES: &[&str] = &["amino", "gogoproto", "google", "tendermint"]; error_chain! { foreign_links { @@ -42,402 +27,25 @@ error_chain! { } } -fn main() { - let root = workspace_root(); - let submodules_dir = format!("{}/{}", root, "external"); - let proto_dir = format!("{}/{}", root, PROTO_DIR); - let out_dir = format!("{}/{}", root, OUT_DIR); - - update_submodules(submodules_dir.as_str()); - export(submodules_dir.as_str(), proto_dir.as_str()); - generate(proto_dir.as_str(), out_dir.as_str()).unwrap(); - output_versions(out_dir.as_str()); - cleanup(out_dir.as_str()); - apply_patches(out_dir.as_str()).unwrap(); - generate_advanced_struct(out_dir.as_str()).unwrap(); - rustfmt(out_dir.as_str()).unwrap(); -} - -fn workspace_root() -> String { +fn workspace_root() -> PathBuf { let output = run_cargo(["locate-project", "--workspace", "--message-format=plain"]).unwrap(); let cargo_path = Path::new(&output); - let workspace_root = cargo_path.parent().unwrap(); - workspace_root.to_string_lossy().to_string() -} - -fn update_submodules(submodules_dir: &str) { - run_git(["submodule", "update", "--init"]).unwrap(); - run_git(["submodule", "foreach", "git", "fetch"]).unwrap(); - - println!("Updating archway-network/archway submodule..."); - let archway_dir = format!("{}/{}", submodules_dir, ARCHWAY_DIR); - run_git(["-C", archway_dir.as_str(), "reset", "--hard", ARCHWAY_REV]).unwrap(); - - println!("Updating cosmos/cosmos-sdk submodule..."); - let sdk_dir = format!("{}/{}", submodules_dir, COSMOS_SDK_DIR); - run_git(["-C", sdk_dir.as_str(), "reset", "--hard", COSMOS_SDK_REV]).unwrap(); - - println!("Updating cosmos/ibc-go submodule..."); - let ibc_dir = format!("{}/{}", submodules_dir, IBC_DIR); - run_git(["-C", ibc_dir.as_str(), "reset", "--hard", IBC_REV]).unwrap(); - - println!("Updating wasmd submodule..."); 
- let wasmd_dir = format!("{}/{}", submodules_dir, WASMD_DIR); - run_git(["-C", wasmd_dir.as_str(), "reset", "--hard", WASMD_REV]).unwrap(); -} - -fn export(submodules_dir: &str, proto_dir: impl AsRef) { - if proto_dir.as_ref().exists() { - fs::remove_dir_all(&proto_dir).unwrap(); - } - - run_buf_export(submodules_dir, ARCHWAY_DIR, &proto_dir).unwrap(); - run_buf_export(submodules_dir, COSMOS_SDK_DIR, &proto_dir).unwrap(); - run_buf_export(submodules_dir, IBC_DIR, &proto_dir).unwrap(); - run_buf_export(submodules_dir, WASMD_DIR, &proto_dir).unwrap(); -} - -fn output_versions(out_dir: &str) { - println!("Writing versions..."); - let out_dir = Path::new(out_dir); - fs::write(out_dir.join("ARCHWAY_COMMIT"), ARCHWAY_REV).unwrap(); - fs::write(out_dir.join("COSMOS_SDK_COMMIT"), COSMOS_SDK_REV).unwrap(); - fs::write(out_dir.join("IBC_COMMIT"), IBC_REV).unwrap(); - fs::write(out_dir.join("WASMD_COMMIT"), WASMD_REV).unwrap(); -} - -fn apply_patches(out_dir: &str) -> Result<()> { - println!("Applying patches..."); - /// Regex substitutions to apply to the prost-generated output - const REPLACEMENTS: &[(&str, &str)] = &[ - // Feature-gate gRPC impls which use `tonic::transport` - ( - "impl(.+)tonic::transport(.+)", - "#[cfg(feature = \"grpc-transport\")]\n \ - #[cfg_attr(docsrs, doc(cfg(feature = \"grpc-transport\")))]\n \ - impl${1}tonic::transport${2}", - ), - // Feature-gate the ProtoBuf descriptors - ( - "pub const FILE_DESCRIPTOR_SET", - "#[cfg(feature = \"proto-descriptor\")]\n \ - #[cfg_attr(docsrs, doc(cfg(feature = \"proto-descriptor\")))]\n \ - pub const FILE_DESCRIPTOR_SET", - ), - ]; - - let src_files_glob = format!("{out_dir}/*.rs"); - let src_files: Vec = glob(src_files_glob.as_str())?.flatten().collect(); - for src in src_files { - patch_file(src, REPLACEMENTS)?; - } - - apply_cosmos_staking_patches(out_dir); - - Ok(()) -} - -fn _generate_advanced_struct(out_dir: &str) -> Result<()> { - println!("Generating Any replacements..."); - - // These files should be 
ignored - let ignore: Vec = [ - "cosmos.crypto.keyring.v1.rs", - "ibc.core.client.v1.rs", - "ibc.core.connection.v1.rs", - ] - .iter() - .map(|f| { - let mut p = PathBuf::from(out_dir); - p.push(f); - p - }) - .collect(); - - // Premake regexes - let struct_regex_str = "pub struct[[^}][:alnum:][:cntrl:]]+}"; - let struct_prost_name = - "[[:cntrl:]]impl ::prost::Name for [[^}][:alnum:][:cntrl:]]+}[[^}][:alnum:][:cntrl:]]+}}"; - let struct_regex = - Regex::new(&format!("({})({})", struct_regex_str, struct_prost_name)).unwrap(); - let any_regex = Regex::new("::pbjson_types::Any").unwrap(); - let struct_start_regex = Regex::new("(pub struct [[:alnum:]]+)").unwrap(); - let struct_prost_start_regex = Regex::new("(impl)( ::prost::Name for [[:alnum:]]+)").unwrap(); - let generic_option_regex = - Regex::new("(pub [[:alnum:]]+: ::core::option::Option<[[:alpha:]]>,)").unwrap(); - let generic_vec_regex = - Regex::new("(pub [[:alnum:]]+: ::prost::alloc::vec::Vec<[[:alpha:]]>,)").unwrap(); - let file_regex = Regex::new(r"(\.[[:alnum:]]+\.)rs").unwrap(); - - const GENERICS: [char; 6] = ['A', 'B', 'C', 'D', 'E', 'F']; - - // Get all generated files - let src_files_glob = format!("{out_dir}/*.rs"); - let src_files: Vec = glob(src_files_glob.as_str())?.flatten().collect(); - - for src in src_files { - if ignore.contains(&src) { - continue; - } - - let current_file = fs::read_to_string(&src)?; - - // Patches file that feature locks conflicting structs - let mut new_file = String::new(); - let mut new_file_cursor = 0; - - // Contains all the any replacements - let mut advanced_file = String::new(); - - for matched_struct in struct_regex.find_iter(¤t_file) { - // Add necessary prefix info - let mut res = "\ - #[allow(clippy::derive_partial_eq_without_eq)] \n \ - #[derive(Clone, PartialEq, ::prost::Message, ::serde::Serialize, ::serde::Deserialize)] \n \ - #[cfg(feature = \"replace-any\")]\n \ - ".to_string(); - let s = matched_struct.as_str(); - - // Generics builder - let mut generics 
= "<".to_string(); - - // Last index, it makes string building much easier - let mut cursor = 0; - // Find all occurrences of Any and replace them with a generic - for (i, m) in any_regex.find_iter(s).enumerate() { - // Build struct - res.push_str(&s[cursor..m.start()]); - res.push(GENERICS[i]); - - generics.push(GENERICS[i]); - generics.push(','); - cursor = m.end(); - } - res.push_str(&s[cursor..s.len()]); - generics.push('>'); - - // Add generics to the struct - res = struct_start_regex - .replace(res.as_str(), format!("{}{}", "${1}", generics)) - .to_string(); - res = struct_prost_start_regex - .replace( - res.as_str(), - format!( - "#[cfg(feature = \"replace-any\")]\n{}{generics}{}{generics}", - "${1}", "${2}" - ), - ) - .to_string(); - - // If cursor == 0 it means there was never a match - if cursor != 0 { - // Patch the file with feature gating to avoid conflicts - new_file.push_str(¤t_file[new_file_cursor..matched_struct.start()]); - new_file_cursor = matched_struct.end(); - new_file.push_str(&struct_regex.replace( - matched_struct.as_str(), - &format!( - "{cfg}{}{cfg}{}", - "${1}", - "${2}", - cfg = "#[cfg(not(feature = \"replace-any\"))]\n" - ), - )); - - // Add serde option serialization - res = generic_option_regex - .replace_all( - &res, - "\ - #[serde( \n \ - serialize_with = \"crate::any::option::serialize\", \n \ - deserialize_with = \"crate::any::option::deserialize\" \n \ - )] \n \ - ${1}\ - ", - ) - .to_string(); - - // Add serde vec serialization - res = generic_vec_regex - .replace_all( - &res, - "\ - #[serde( \n \ - serialize_with = \"crate::any::vec::serialize\", \n \ - deserialize_with = \"crate::any::vec::deserialize\" \n \ - )] \n \ - ${1}\ - ", - ) - .to_string(); - - advanced_file.push_str(&res); - } - } - - // Create new file - if !advanced_file.is_empty() { - // Patch feature gating - new_file.push_str(¤t_file[new_file_cursor..current_file.len()]); - fs::write(&src, new_file).unwrap(); - - let path = file_regex - 
.replace(src.to_str().unwrap(), "${1}advanced.rs") - .to_string(); - patch_file( - src, - &[( - r"(// @@protoc_insertion_point\(module\))", - &format!( - "include!(\"{}\");\n{}", - path.split('/').last().unwrap(), - "${1}" - ), - )], - )?; - fs::write(path, advanced_file).unwrap(); - } - } - - Ok(()) -} - -/// Fix clashing type names in prost-generated code. -fn apply_cosmos_staking_patches(out_dir: &str) { - const REPLACEMENTS: &[(&str, &str)] = &[ - ("enum Validators", "enum Policy"), - ( - "stake_authorization::Validators", - "stake_authorization::Policy", - ), - ]; - - patch_file(format!("{out_dir}/cosmos.staking.v1beta1.rs"), REPLACEMENTS) - .expect("error patching cosmos.staking.v1beta1.rs"); -} - -fn patch_file(path: impl AsRef, replacements: &[(&str, &str)]) -> io::Result<()> { - let mut contents = fs::read_to_string(&path)?; - - for &(regex, replacement) in replacements { - contents = Regex::new(regex) - .unwrap_or_else(|_| panic!("invalid regex: {}", regex)) - .replace_all(&contents, replacement) - .to_string(); - } - - fs::write(path, &contents) -} - -fn cleanup(out_dir: &str) { - println!("Cleaning up..."); - for &pkg in EXCLUDED_PROTO_PACKAGES { - let excluded_files_glob = format!("{out_dir}/{pkg}*.rs"); - glob(excluded_files_glob.as_str()) - .unwrap() - .flatten() - .try_for_each(fs::remove_file) - .unwrap(); - } + cargo_path.parent().unwrap().to_path_buf() } -fn run_buf_export( - submodules_dir: &str, - proto: &str, - export_dir: impl AsRef, -) -> Result { - println!("Exporting {}...", proto); - let proto_path = format!("{}/{}/{}", submodules_dir, proto, "proto"); - run_cmd( - "buf", - [ - "export", - "-o", - &export_dir.as_ref().display().to_string(), - &proto_path, - ], - ) -} - -fn generate(proto_path: impl AsRef, out_dir: impl AsRef) -> Result { - println!("Generating proto..."); - - if out_dir.as_ref().exists() { - fs::remove_dir_all(&out_dir).unwrap(); - } - - run_cmd( - "buf", - [ - "generate", - "--template", - "proto-build/buf.gen.yaml", - 
"--include-imports", - "-o", - &out_dir.as_ref().display().to_string(), - &proto_path.as_ref().display().to_string(), - ], - ) -} - -fn run_git(args: impl IntoIterator>) -> Result { - run_cmd("git", args) -} - -fn rustfmt(out_dir: &str) -> Result { - println!("Running rustfmt..."); - let files = collect_files(out_dir, "*.rs")?.into_iter().map(Into::into); - let args: Vec = ["--edition", "2021"] - .iter() - .map(Into::into) - .chain(files) - .collect(); - - run_cmd("rustfmt", args) -} - -fn collect_files(dir: &str, pattern: &str) -> Result> { - let file_glob = format!("{dir}/**/{pattern}"); - let paths: Vec = glob(file_glob.as_str())?.flatten().collect(); - Ok(paths) -} - -fn run_cargo(args: impl IntoIterator>) -> Result { - run_cmd(env!("CARGO"), args) -} - -fn run_cmd( - cmd: impl AsRef, - args: impl IntoIterator>, -) -> Result { - let process::Output { - stdout, - stderr, - status, - } = process::Command::new(&cmd) - .args(args) - .output() - .unwrap_or_else(|e| match e.kind() { - io::ErrorKind::NotFound => panic!( - "error running '{:?}': command not found. 
Is it installed?", - cmd.as_ref() - ), - _ => panic!("error running '{:?}': {:?}", cmd.as_ref(), e), - }); - - let output = std::str::from_utf8(&stdout)?.trim(); - if !status.success() { - let error = std::str::from_utf8(&stderr)?.trim(); - panic!( - "{:?} exited with error code: {:?}\nstdout: {:?}\nstderr: {:?}", - cmd.as_ref(), - status.code().unwrap_or(-1), - output, - error - ); - } - - Ok(output.to_string()) +fn main() { + let root = workspace_root(); + let submodules_dir = root.join("external"); + let proto_dir = root.join(PROTO_DIR); + let out_dir = root.join(OUT_DIR); + let buf_gen_path = root.join("proto-build").join("buf.gen.yaml"); + + update_submodules(&submodules_dir); + export(&submodules_dir, &proto_dir); + generate(&buf_gen_path, &proto_dir, &out_dir).unwrap(); + output_versions(&out_dir); + cleanup(&out_dir); + apply_patches(&out_dir).unwrap(); + generate_advanced_struct(&out_dir).unwrap(); + rustfmt(&out_dir).unwrap(); } diff --git a/proto-build/src/parser.rs b/proto-build/src/parser.rs deleted file mode 100644 index f0b8278..0000000 --- a/proto-build/src/parser.rs +++ /dev/null @@ -1,523 +0,0 @@ -use crate::patch_file; -use glob::glob; -use proc_macro2::{Literal, Punct, Spacing, Span, TokenStream}; -use quote::{quote, TokenStreamExt}; -use regex::Regex; -use std::cmp::Ordering; -use std::collections::BTreeMap; -use std::fs; -use std::path::PathBuf; -use syn::punctuated::Punctuated; -use syn::token::{Paren, PathSep}; -use syn::{ - AngleBracketedGenericArguments, AttrStyle, Attribute, Field, Fields, FieldsNamed, File, - GenericArgument, GenericParam, Ident, Item, ItemStruct, MacroDelimiter, Meta, MetaList, Path, - PathArguments, PathSegment, TraitBound, TraitBoundModifier, Type, TypeParam, TypeParamBound, - TypePath, -}; - -fn as_struct(item: &mut Item) -> Option<&mut ItemStruct> { - match item { - Item::Struct(s) => Some(s), - _ => None, - } -} - -fn as_path(item: &mut Type) -> Option<&mut TypePath> { - match item { - Type::Path(ret) => Some(ret), 
- _ => None, - } -} - -fn as_named_fields(fields: &mut Fields) -> Option<&mut FieldsNamed> { - match fields { - Fields::Named(ret) => Some(ret), - _ => None, - } -} - -enum FoundEnclosure { - Option, - Vec, -} - -// The types we're going to be modifying only appear enclosed in Options and Vecs -fn is_important(field: &mut Field) -> Option<(FoundEnclosure, &mut TypePath)> { - let path = as_path(&mut field.ty).unwrap(); - // Get the last segment since the rest is a path - let field_type = path.path.segments.iter_mut().last().unwrap(); - - let field_type_ident = field_type.ident.to_string(); - let found = if &field_type_ident == "Option" { - Some(FoundEnclosure::Option) - } else if &field_type_ident == "Vec" { - Some(FoundEnclosure::Vec) - } else { - None - }; - - // If we found the types we needed - if let Some(enclosed) = found { - if let PathArguments::AngleBracketed(bracket) = &mut field_type.arguments { - for p in bracket.args.iter_mut() { - if let GenericArgument::Type(t) = p { - if let Some(any_path) = as_path(t) { - return Some((enclosed, any_path)); - } else { - println!("something else") - } - } - } - } - } - - None -} - -// prost Message implements a generic B which was conflicting with this script -const GENERICS: [&str; 10] = ["A", "BB", "C", "D", "E", "F", "G", "H", "I", "J"]; - -fn gen_generic(name: &str) -> Path { - Path { - leading_colon: None, - segments: create_punctuated(vec![name]), - } -} - -fn create_punctuated(path: Vec<&str>) -> Punctuated { - let mut ret = Punctuated::new(); - for p in path { - ret.push(PathSegment { - ident: Ident::new(p, Span::call_site()), - arguments: PathArguments::None, - }); - } - ret -} - -fn trait_param_bound(path: Vec<&str>) -> TypeParamBound { - TypeParamBound::Trait(TraitBound { - paren_token: None, - modifier: TraitBoundModifier::None, - lifetimes: None, - path: Path { - leading_colon: None, - segments: create_punctuated(path), - }, - }) -} - -fn gen_unnamed_param(name: &str) -> TypeParam { - let mut type_param = 
TypeParam::from(Ident::new(name, Span::call_site())); - type_param.bounds.push(trait_param_bound(vec!["Clone"])); - type_param.bounds.push(trait_param_bound(vec!["PartialEq"])); - type_param.bounds.push(trait_param_bound(vec!["Default"])); - type_param.bounds.push(trait_param_bound(vec!["Send"])); - type_param.bounds.push(trait_param_bound(vec!["Sync"])); - type_param - .bounds - .push(trait_param_bound(vec!["prost", "Message"])); - type_param - .bounds - .push(trait_param_bound(vec!["serde", "Serialize"])); - type_param - .bounds - .push(trait_param_bound(vec!["serde", "de", "DeserializeOwned"])); - - type_param -} - -fn gen_type_param(name: &str) -> TypeParam { - let mut type_param = gen_unnamed_param(name); - type_param - .bounds - .push(trait_param_bound(vec!["prost", "Name"])); - type_param -} - -pub fn generate_advanced_struct(out_dir: &str) -> crate::Result<()> { - println!("Loading and patching all files containing Any"); - let mut project_tokens = load_and_patch_any(out_dir); - println!("Patching generic trait constraints"); - patch_generics(&mut project_tokens); - println!("Patching prost Name impls"); - patch_impls(&mut project_tokens); - println!("Saving changes"); - save(out_dir, &project_tokens); - - Ok(()) -} - -fn load_and_patch_any(out_dir: &str) -> BTreeMap)> { - // Map all file ASTs - let mut project_tokens = BTreeMap::new(); - - // Get all generated files - let src_files_glob = format!("{out_dir}/*.rs"); - let src_files: Vec = glob(src_files_glob.as_str()).unwrap().flatten().collect(); - - for src in src_files { - let current_file = fs::read_to_string(&src).unwrap(); - - // // Filter files that dont have `Any` - // if !any_filter.is_match(¤t_file) { - // continue; - // } - - let mut ast = syn::parse_file(¤t_file).unwrap(); - - // Get all struct we might work with here - let mut structs = BTreeMap::new(); - - // First pass is just for replacing Any - // Also adds all the serde related information to serialize and deserialize into an appropriate 
structure - for (idx, item) in ast.items.iter_mut().enumerate() { - if let Some(item) = as_struct(item) { - // Find any fields and replace with generics - let fields = as_named_fields(&mut item.fields).unwrap(); - for field in fields.named.iter_mut() { - if let Some((ty, path)) = is_important(field) { - if path.path.segments.last().unwrap().ident == "Any" { - // Set struct generics - let generic = GENERICS[item.generics.params.len()]; - path.path = gen_generic(generic); - item.generics - .params - .push(GenericParam::Type(gen_type_param(generic))); - - // Set serialization function - let serde_path = match ty { - FoundEnclosure::Option => "option", - FoundEnclosure::Vec => "vec", - }; - - let mut token_stream = TokenStream::new(); - token_stream.append(Ident::new("serialize_with", Span::call_site())); - token_stream.append(Punct::new('=', Spacing::Alone)); - token_stream.append(Literal::string(&format!( - "crate::any::{}::serialize", - serde_path - ))); - token_stream.append(Punct::new(',', Spacing::Alone)); - token_stream.append(Ident::new("deserialize_with", Span::call_site())); - token_stream.append(Punct::new('=', Spacing::Alone)); - token_stream.append(Literal::string(&format!( - "crate::any::{}::deserialize", - serde_path - ))); - - field.attrs.push(Attribute { - pound_token: Default::default(), - style: AttrStyle::Outer, - bracket_token: Default::default(), - meta: Meta::List(MetaList { - path: Path { - leading_colon: None, - segments: create_punctuated(vec!["serde"]), - }, - delimiter: MacroDelimiter::Paren(Paren::default()), - tokens: token_stream, - }), - }); - } - } - } - - // Add the struct reference to work on it later - structs.insert(item.ident.to_string(), idx); - } - } - - // Remove the tonic include for now - if let Some(Item::Macro(_)) = ast.items.last() { - ast.items.pop(); - } - - let file_name = src.to_str().unwrap().split('/').last().unwrap(); - project_tokens.insert(file_name.to_string(), (ast, structs)); - } - - project_tokens -} - -fn 
patch_generics(files: &mut BTreeMap)>) { - let mut updated_files = BTreeMap::new(); - for key in files.keys().cloned() { - updated_files.insert(key, false); - } - - loop { - // Go through all keys - // Pop current key to be able to use the tree map whenever - // Check for struct in local struct, if not found check in all of the map - // Push the struct back inside - - let mut new_fixes = false; - - let keys: Vec = files.keys().cloned().collect(); - for key in keys { - let (mut ast, structs) = files.remove(&key).unwrap(); - - let struct_idxs: Vec = structs.values().cloned().collect(); - - // Iterate through each struct, get their generic total - for idx in struct_idxs { - let (left, arr) = ast.items.split_at_mut(idx); - let (temp, right) = arr.split_at_mut(1); - - let curr_struct = as_struct(temp.get_mut(0).unwrap()).unwrap(); - let total_generics = curr_struct.generics.params.len(); - let mut new_total_generics = 0; - - // Iterate through each field and automatically update the fields and add to total field tally - for field in as_named_fields(&mut curr_struct.fields) - .unwrap() - .named - .iter_mut() - { - let name = field.ident.clone().unwrap().to_string(); - let mut found_ty = None; - if let Some((field_ty, path)) = is_important(field) { - let ty = path.path.segments.last_mut().unwrap(); - - let ident_name = ty.ident.to_string(); - - fn push_generics( - ty_struct: &ItemStruct, - ty: &mut PathSegment, - mut new_total_generics: usize, - ) -> usize { - let mut args = Punctuated::new(); - - for _ in 0..ty_struct.generics.params.len() { - args.push(GenericArgument::Type(Type::Path(TypePath { - qself: None, - path: gen_generic(GENERICS[new_total_generics]), - }))); - new_total_generics += 1; - } - - ty.arguments = - PathArguments::AngleBracketed(AngleBracketedGenericArguments { - colon2_token: None, - lt_token: Default::default(), - args, - gt_token: Default::default(), - }); - - new_total_generics - } - - // Generis state patch - if ident_name == "GenesisState" - && 
key == "ibc.core.types.v1.rs" - && name == "client_genesis" - { - let (key, s) = files.get_mut("ibc.core.client.v1.rs").unwrap(); - let ty_struct = as_struct( - key.items.get_mut(*s.get("GenesisState").unwrap()).unwrap(), - ) - .unwrap(); - found_ty = Some(field_ty); - new_total_generics = push_generics(ty_struct, ty, new_total_generics); - } else if let Some(i) = structs.get(&ident_name) { - let ty_item = match i.cmp(&idx) { - Ordering::Less => left.get_mut(*i).unwrap(), - Ordering::Greater => right.get_mut(*i - idx - 1).unwrap(), - Ordering::Equal => continue, - }; - - let ty_struct = as_struct(ty_item).unwrap(); - found_ty = Some(field_ty); - new_total_generics = push_generics(ty_struct, ty, new_total_generics); - } else if GENERICS.contains(&&*ident_name) { - ty.ident = Ident::new(GENERICS[new_total_generics], Span::call_site()); - new_total_generics += 1; - } else if let Some((_, (other_ast, other_structs))) = files - .iter_mut() - .find(|(_, (_, s))| s.contains_key(&ident_name)) - { - let ty_struct = as_struct( - other_ast - .items - .get_mut(*other_structs.get(&ident_name).unwrap()) - .unwrap(), - ) - .unwrap(); - found_ty = Some(field_ty); - new_total_generics = push_generics(ty_struct, ty, new_total_generics); - } - } - - // Try to add field attrs - if let Some(found_ty) = found_ty { - let last = field.attrs.last().unwrap().clone(); - if let Meta::List(meta_list) = &last.meta { - if meta_list.path.segments.last().unwrap().ident != "serde" { - // Set serialization function - let serde_path = match found_ty { - FoundEnclosure::Option => "option", - FoundEnclosure::Vec => "vec", - }; - - let mut token_stream = TokenStream::new(); - token_stream - .append(Ident::new("serialize_with", Span::call_site())); - token_stream.append(Punct::new('=', Spacing::Alone)); - token_stream.append(Literal::string(&format!( - "crate::any::{}::generic_serialize", - serde_path - ))); - token_stream.append(Punct::new(',', Spacing::Alone)); - token_stream - 
.append(Ident::new("deserialize_with", Span::call_site())); - token_stream.append(Punct::new('=', Spacing::Alone)); - token_stream.append(Literal::string(&format!( - "crate::any::{}::generic_deserialize", - serde_path - ))); - - field.attrs.push(Attribute { - pound_token: Default::default(), - style: AttrStyle::Outer, - bracket_token: Default::default(), - meta: Meta::List(MetaList { - path: Path { - leading_colon: None, - segments: create_punctuated(vec!["serde"]), - }, - delimiter: MacroDelimiter::Paren(Paren::default()), - tokens: token_stream, - }), - }); - } - } - } - } - - if new_total_generics > 0 { - updated_files.insert(key.clone(), true); - } - - if total_generics != new_total_generics { - curr_struct.generics.params.clear(); - - for gen in GENERICS[0..new_total_generics].iter() { - curr_struct - .generics - .params - .push(GenericParam::Type(gen_type_param(gen))); - } - new_fixes = true; - } - } - - files.insert(key, (ast, structs)); - } - - if !new_fixes { - break; - } - } - - // Remove files that werent updated - for (key, updated) in updated_files.iter() { - if !updated { - files.remove(key); - } - } -} - -fn patch_impls(files: &mut BTreeMap)>) { - for (_, (ast, structs)) in files.iter_mut() { - // Fix the Name implementations - // Since we are borrowing from the same array, we need to split the list to use it - for i in 1..ast.items.len() { - let (left, right) = ast.items.split_at_mut(i); - let item = right.get_mut(0).unwrap(); - if let Item::Impl(impl_item) = item { - // Ignore if its not the Name impl - if let Some(t) = impl_item.trait_.clone() { - if t.1.segments.last().unwrap().ident != "Name" { - continue; - } - } - - // Get the struct that is getting an impl - let implemented = as_path(impl_item.self_ty.as_mut()) - .map(|p| p.path.segments.last().unwrap().ident.to_string()) - .unwrap_or("".to_string()); - - let idx = match structs.get(&implemented) { - // If its not a struct then ignore - None => continue, - Some(idx) => *idx, - }; - // Unwrap 
cause we know its a struct - let struct_item = as_struct(left.get_mut(idx).unwrap()).unwrap(); - // Ignore structs with no generics - if struct_item.generics.params.is_empty() { - continue; - } - - impl_item.generics = struct_item.generics.clone(); - - if let Some(impl_path) = as_path(impl_item.self_ty.as_mut()) { - let mut args = Punctuated::new(); - - for gen in GENERICS[0..impl_item.generics.params.len()].iter() { - args.push(GenericArgument::Type(Type::Path(TypePath { - qself: None, - path: gen_generic(gen), - }))); - } - - let seg = impl_path.path.segments.last_mut().unwrap(); - - seg.arguments = PathArguments::AngleBracketed(AngleBracketedGenericArguments { - colon2_token: None, - lt_token: Default::default(), - args, - gt_token: Default::default(), - }); - } - } - } - } -} - -fn save(out_dir: &str, files: &BTreeMap)>) { - for (file, (data, _)) in files.iter() { - // Patch the mod file - let file_regex = Regex::new(r"(\.[[:alnum:]]+\.)rs").unwrap(); - let new_file = file_regex.replace(file, "${1}abstract.rs").to_string(); - - patch_file( - format!("{}/mod.rs", out_dir), - &[( - &format!( - r"include!\(.{}.\);", - Regex::new(r"(\.)").unwrap().replace(file, r"\.") - ), - &format!( - "\ - #[cfg(not(feature = \"abstract-any\"))]\n\ - include!(\"{}\");\n\ - #[cfg(feature = \"abstract-any\")]\n\ - include!(\"{}\");\ - ", - file, new_file - ), - )], - ) - .unwrap(); - - // Export the generated structure and save in file - fs::write( - format!("{}/{}", out_dir, new_file), - quote!(#data).to_string(), - ) - .unwrap(); - } -} diff --git a/proto-build/src/parser/commands/load_and_patch_any.rs b/proto-build/src/parser/commands/load_and_patch_any.rs new file mode 100644 index 0000000..93d8b40 --- /dev/null +++ b/proto-build/src/parser/commands/load_and_patch_any.rs @@ -0,0 +1,109 @@ +use crate::parser::consts::GENERICS; +use crate::parser::utils::common::{ + create_punctuated, fields_as_named, gen_generic, item_as_struct, +}; +use 
crate::parser::utils::gen_type_param::gen_type_param; +use crate::parser::utils::is_important::{is_important, FoundEnclosure}; +use glob::glob; +use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream}; +use quote::TokenStreamExt; +use std::collections::BTreeMap; +use std::fs; +use std::path::{Path, PathBuf}; +use syn::token::Paren; +use syn::{AttrStyle, Attribute, File, GenericParam, Item, MacroDelimiter, Meta, MetaList}; + +pub fn load_and_patch_any(out_dir: &Path) -> BTreeMap)> { + // Map all file ASTs + let mut project_tokens = BTreeMap::new(); + + // Get all generated files + let src_files_glob = out_dir.join("*.rs"); + let src_files: Vec = glob(src_files_glob.to_str().unwrap()) + .unwrap() + .flatten() + .collect(); + + for src in src_files { + let current_file = fs::read_to_string(&src).unwrap(); + + // // Filter files that don't have `Any` + // if !any_filter.is_match(¤t_file) { + // continue; + // } + + let mut ast = syn::parse_file(¤t_file).unwrap(); + + // Get all struct we might work with here + let mut structs = BTreeMap::new(); + + // First pass is just for replacing Any + // Also adds all the serde related information to serialize and deserialize into an appropriate structure + for (idx, item) in ast.items.iter_mut().enumerate() { + if let Some(item) = item_as_struct(item) { + // Find any fields and replace with generics + let fields = fields_as_named(&mut item.fields).unwrap(); + for field in fields.named.iter_mut() { + if let Some((ty, path)) = is_important(field) { + if path.path.segments.last().unwrap().ident == "Any" { + // Set struct generics + let generic = GENERICS[item.generics.params.len()]; + path.path = gen_generic(generic); + item.generics + .params + .push(GenericParam::Type(gen_type_param(generic))); + + // Set serialization function + let serde_path = match ty { + FoundEnclosure::Option => "option", + FoundEnclosure::Vec => "vec", + }; + + let mut token_stream = TokenStream::new(); + 
token_stream.append(Ident::new("serialize_with", Span::call_site())); + token_stream.append(Punct::new('=', Spacing::Alone)); + token_stream.append(Literal::string(&format!( + "crate::any::{}::serialize", + serde_path + ))); + token_stream.append(Punct::new(',', Spacing::Alone)); + token_stream.append(Ident::new("deserialize_with", Span::call_site())); + token_stream.append(Punct::new('=', Spacing::Alone)); + token_stream.append(Literal::string(&format!( + "crate::any::{}::deserialize", + serde_path + ))); + + field.attrs.push(Attribute { + pound_token: Default::default(), + style: AttrStyle::Outer, + bracket_token: Default::default(), + meta: Meta::List(MetaList { + path: syn::Path { + leading_colon: None, + segments: create_punctuated(vec!["serde"]), + }, + delimiter: MacroDelimiter::Paren(Paren::default()), + tokens: token_stream, + }), + }); + } + } + } + + // Add the struct reference to work on it later + structs.insert(item.ident.to_string(), idx); + } + } + + // Remove the tonic include for now + if let Some(Item::Macro(_)) = ast.items.last() { + ast.items.pop(); + } + + let file_name = src.to_str().unwrap().split('/').last().unwrap(); + project_tokens.insert(file_name.to_string(), (ast, structs)); + } + + project_tokens +} diff --git a/proto-build/src/parser/commands/mod.rs b/proto-build/src/parser/commands/mod.rs new file mode 100644 index 0000000..21d853e --- /dev/null +++ b/proto-build/src/parser/commands/mod.rs @@ -0,0 +1,4 @@ +pub mod load_and_patch_any; +pub mod patch_generics; +pub mod patch_impls; +pub mod save; diff --git a/proto-build/src/parser/commands/patch_generics.rs b/proto-build/src/parser/commands/patch_generics.rs new file mode 100644 index 0000000..edc0a14 --- /dev/null +++ b/proto-build/src/parser/commands/patch_generics.rs @@ -0,0 +1,204 @@ +use crate::parser::consts::GENERICS; +use crate::parser::utils::common::{ + create_punctuated, fields_as_named, gen_generic, item_as_struct, +}; +use 
crate::parser::utils::gen_type_param::gen_type_param; +use crate::parser::utils::is_important::{is_important, FoundEnclosure}; +use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream}; +use quote::TokenStreamExt; +use std::cmp::Ordering; +use std::collections::BTreeMap; +use syn::punctuated::Punctuated; +use syn::token::Paren; +use syn::{ + AngleBracketedGenericArguments, AttrStyle, Attribute, File, GenericArgument, GenericParam, + ItemStruct, MacroDelimiter, Meta, MetaList, Path, PathArguments, PathSegment, Type, TypePath, +}; + +pub fn patch_generics(files: &mut BTreeMap)>) { + let mut updated_files = BTreeMap::new(); + for key in files.keys().cloned() { + updated_files.insert(key, false); + } + + loop { + // Go through all keys + // Pop current key to be able to use the tree map whenever + // Check for struct in local struct, if not found check in `all` of the map + // Push the struct back inside + + let mut new_fixes = false; + + let keys: Vec = files.keys().cloned().collect(); + for key in keys { + let (mut ast, structs) = files.remove(&key).unwrap(); + + let struct_idxs: Vec = structs.values().cloned().collect(); + + // Iterate through each struct, get their generic total + for idx in struct_idxs { + let (left, arr) = ast.items.split_at_mut(idx); + let (temp, right) = arr.split_at_mut(1); + + let curr_struct = item_as_struct(temp.get_mut(0).unwrap()).unwrap(); + let total_generics = curr_struct.generics.params.len(); + let mut new_total_generics = 0; + + // Iterate through each field and automatically update the fields and add to total field tally + for field in fields_as_named(&mut curr_struct.fields) + .unwrap() + .named + .iter_mut() + { + let name = field.ident.clone().unwrap().to_string(); + let mut found_ty = None; + if let Some((field_ty, path)) = is_important(field) { + let ty = path.path.segments.last_mut().unwrap(); + + let ident_name = ty.ident.to_string(); + + fn push_generics( + ty_struct: &ItemStruct, + ty: &mut PathSegment, + mut 
new_total_generics: usize, + ) -> usize { + let mut args = Punctuated::new(); + + for _ in 0..ty_struct.generics.params.len() { + args.push(GenericArgument::Type(Type::Path(TypePath { + qself: None, + path: gen_generic(GENERICS[new_total_generics]), + }))); + new_total_generics += 1; + } + + ty.arguments = + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token: None, + lt_token: Default::default(), + args, + gt_token: Default::default(), + }); + + new_total_generics + } + + // Generis state patch + if ident_name == "GenesisState" + && key == "ibc.core.types.v1.rs" + && name == "client_genesis" + { + let (key, s) = files.get_mut("ibc.core.client.v1.rs").unwrap(); + let ty_struct = item_as_struct( + key.items.get_mut(*s.get("GenesisState").unwrap()).unwrap(), + ) + .unwrap(); + found_ty = Some(field_ty); + new_total_generics = push_generics(ty_struct, ty, new_total_generics); + } else if let Some(i) = structs.get(&ident_name) { + let ty_item = match i.cmp(&idx) { + Ordering::Less => left.get_mut(*i).unwrap(), + Ordering::Greater => right.get_mut(*i - idx - 1).unwrap(), + Ordering::Equal => continue, + }; + + let ty_struct = item_as_struct(ty_item).unwrap(); + found_ty = Some(field_ty); + new_total_generics = push_generics(ty_struct, ty, new_total_generics); + } else if GENERICS.contains(&&*ident_name) { + ty.ident = Ident::new(GENERICS[new_total_generics], Span::call_site()); + new_total_generics += 1; + } else if let Some((_, (other_ast, other_structs))) = files + .iter_mut() + .find(|(_, (_, s))| s.contains_key(&ident_name)) + { + let ty_struct = item_as_struct( + other_ast + .items + .get_mut(*other_structs.get(&ident_name).unwrap()) + .unwrap(), + ) + .unwrap(); + found_ty = Some(field_ty); + new_total_generics = push_generics(ty_struct, ty, new_total_generics); + } + } + + // Try to add field attrs + if let Some(found_ty) = found_ty { + let last = field.attrs.last().unwrap().clone(); + if let Meta::List(meta_list) = &last.meta { + if 
meta_list.path.segments.last().unwrap().ident != "serde" { + // Set serialization function + let serde_path = match found_ty { + FoundEnclosure::Option => "option", + FoundEnclosure::Vec => "vec", + }; + + let mut token_stream = TokenStream::new(); + token_stream + .append(Ident::new("serialize_with", Span::call_site())); + token_stream.append(Punct::new('=', Spacing::Alone)); + token_stream.append(Literal::string(&format!( + "crate::any::{}::generic_serialize", + serde_path + ))); + token_stream.append(Punct::new(',', Spacing::Alone)); + token_stream + .append(Ident::new("deserialize_with", Span::call_site())); + token_stream.append(Punct::new('=', Spacing::Alone)); + token_stream.append(Literal::string(&format!( + "crate::any::{}::generic_deserialize", + serde_path + ))); + + field.attrs.push(Attribute { + pound_token: Default::default(), + style: AttrStyle::Outer, + bracket_token: Default::default(), + meta: Meta::List(MetaList { + path: Path { + leading_colon: None, + segments: create_punctuated(vec!["serde"]), + }, + delimiter: MacroDelimiter::Paren(Paren::default()), + tokens: token_stream, + }), + }); + } + } + } + } + + if new_total_generics > 0 { + updated_files.insert(key.clone(), true); + } + + if total_generics != new_total_generics { + curr_struct.generics.params.clear(); + + for gen in GENERICS[0..new_total_generics].iter() { + curr_struct + .generics + .params + .push(GenericParam::Type(gen_type_param(gen))); + } + new_fixes = true; + } + } + + files.insert(key, (ast, structs)); + } + + if !new_fixes { + break; + } + } + + // Remove files that werent updated + for (key, updated) in updated_files.iter() { + if !updated { + files.remove(key); + } + } +} diff --git a/proto-build/src/parser/commands/patch_impls.rs b/proto-build/src/parser/commands/patch_impls.rs new file mode 100644 index 0000000..301c620 --- /dev/null +++ b/proto-build/src/parser/commands/patch_impls.rs @@ -0,0 +1,65 @@ +use crate::parser::consts::GENERICS; +use 
crate::parser::utils::common::{gen_generic, item_as_struct, type_as_path}; +use std::collections::BTreeMap; +use syn::punctuated::Punctuated; +use syn::{ + AngleBracketedGenericArguments, File, GenericArgument, Item, PathArguments, Type, TypePath, +}; + +pub fn patch_impls(files: &mut BTreeMap)>) { + for (_, (ast, structs)) in files.iter_mut() { + // Fix the Name implementations + // Since we are borrowing from the same array, we need to split the list to use it + for i in 1..ast.items.len() { + let (left, right) = ast.items.split_at_mut(i); + let item = right.get_mut(0).unwrap(); + if let Item::Impl(impl_item) = item { + // Ignore if its not the Name impl + if let Some(t) = impl_item.trait_.clone() { + if t.1.segments.last().unwrap().ident != "Name" { + continue; + } + } + + // Get the struct that is getting an impl + let implemented = type_as_path(impl_item.self_ty.as_mut()) + .map(|p| p.path.segments.last().unwrap().ident.to_string()) + .unwrap_or("".to_string()); + + let idx = match structs.get(&implemented) { + // If its not a struct then ignore + None => continue, + Some(idx) => *idx, + }; + // Unwrap cause we know its a struct + let struct_item = item_as_struct(left.get_mut(idx).unwrap()).unwrap(); + // Ignore structs with no generics + if struct_item.generics.params.is_empty() { + continue; + } + + impl_item.generics = struct_item.generics.clone(); + + if let Some(impl_path) = type_as_path(impl_item.self_ty.as_mut()) { + let mut args = Punctuated::new(); + + for gen in GENERICS[0..impl_item.generics.params.len()].iter() { + args.push(GenericArgument::Type(Type::Path(TypePath { + qself: None, + path: gen_generic(gen), + }))); + } + + let seg = impl_path.path.segments.last_mut().unwrap(); + + seg.arguments = PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token: None, + lt_token: Default::default(), + args, + gt_token: Default::default(), + }); + } + } + } + } +} diff --git a/proto-build/src/parser/commands/save.rs 
b/proto-build/src/parser/commands/save.rs new file mode 100644 index 0000000..8cd612c --- /dev/null +++ b/proto-build/src/parser/commands/save.rs @@ -0,0 +1,38 @@ +use crate::utils::patch_file::patch_file; +use quote::quote; +use regex::Regex; +use std::collections::BTreeMap; +use std::fs; +use std::path::Path; +use syn::File; + +pub fn save(out_dir: &Path, files: &BTreeMap)>) { + for (file, (data, _)) in files.iter() { + // Patch the mod file + let file_regex = Regex::new(r"(\.[[:alnum:]]+\.)rs").unwrap(); + let new_file = file_regex.replace(file, "${1}abstract.rs").to_string(); + + patch_file( + &out_dir.join("mod.rs"), + &[( + &format!( + r"include!\(.{}.\);", + Regex::new(r"(\.)").unwrap().replace(file, r"\.") + ), + &format!( + "\ + #[cfg(not(feature = \"abstract-any\"))]\n\ + include!(\"{}\");\n\ + #[cfg(feature = \"abstract-any\")]\n\ + include!(\"{}\");\ + ", + file, new_file + ), + )], + ) + .unwrap(); + + // Export the generated structure and save in file + fs::write(out_dir.join(new_file), quote!(#data).to_string()).unwrap(); + } +} diff --git a/proto-build/src/parser/consts.rs b/proto-build/src/parser/consts.rs new file mode 100644 index 0000000..960a866 --- /dev/null +++ b/proto-build/src/parser/consts.rs @@ -0,0 +1,2 @@ +// prost Message implements a generic B which was conflicting with this script +pub const GENERICS: [&str; 10] = ["A", "BB", "C", "D", "E", "F", "G", "H", "I", "J"]; diff --git a/proto-build/src/parser/mod.rs b/proto-build/src/parser/mod.rs new file mode 100644 index 0000000..ea087cb --- /dev/null +++ b/proto-build/src/parser/mod.rs @@ -0,0 +1,22 @@ +mod commands; +mod consts; +mod utils; + +use crate::parser::commands::{ + load_and_patch_any::load_and_patch_any, patch_generics::patch_generics, + patch_impls::patch_impls, save::save, +}; +use std::path::Path; + +pub fn generate_advanced_struct(out_dir: &Path) -> crate::Result<()> { + println!("Loading and patching all files containing Any"); + let mut project_tokens = 
load_and_patch_any(out_dir); + println!("Patching generic trait constraints"); + patch_generics(&mut project_tokens); + println!("Patching prost Name impls"); + patch_impls(&mut project_tokens); + println!("Saving changes"); + save(out_dir, &project_tokens); + + Ok(()) +} diff --git a/proto-build/src/parser/utils/common.rs b/proto-build/src/parser/utils/common.rs new file mode 100644 index 0000000..1b8a65a --- /dev/null +++ b/proto-build/src/parser/utils/common.rs @@ -0,0 +1,45 @@ +use proc_macro2::Span; +use syn::punctuated::Punctuated; +use syn::token::PathSep; +use syn::{ + Fields, FieldsNamed, Ident, Item, ItemStruct, Path, PathArguments, PathSegment, Type, TypePath, +}; + +pub fn item_as_struct(item: &mut Item) -> Option<&mut ItemStruct> { + match item { + Item::Struct(s) => Some(s), + _ => None, + } +} + +pub fn type_as_path(item: &mut Type) -> Option<&mut TypePath> { + match item { + Type::Path(ret) => Some(ret), + _ => None, + } +} + +pub fn fields_as_named(fields: &mut Fields) -> Option<&mut FieldsNamed> { + match fields { + Fields::Named(ret) => Some(ret), + _ => None, + } +} + +pub fn create_punctuated(path: Vec<&str>) -> Punctuated { + let mut ret = Punctuated::new(); + for p in path { + ret.push(PathSegment { + ident: Ident::new(p, Span::call_site()), + arguments: PathArguments::None, + }); + } + ret +} + +pub fn gen_generic(name: &str) -> Path { + Path { + leading_colon: None, + segments: create_punctuated(vec![name]), + } +} diff --git a/proto-build/src/parser/utils/gen_type_param.rs b/proto-build/src/parser/utils/gen_type_param.rs new file mode 100644 index 0000000..a82f855 --- /dev/null +++ b/proto-build/src/parser/utils/gen_type_param.rs @@ -0,0 +1,43 @@ +use crate::parser::utils::common::create_punctuated; +use proc_macro2::{Ident, Span}; +use syn::{Path, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound}; + +fn trait_param_bound(path: Vec<&str>) -> TypeParamBound { + TypeParamBound::Trait(TraitBound { + paren_token: None, + modifier: 
TraitBoundModifier::None, + lifetimes: None, + path: Path { + leading_colon: None, + segments: create_punctuated(path), + }, + }) +} + +fn gen_unnamed_param(name: &str) -> TypeParam { + let mut type_param = TypeParam::from(Ident::new(name, Span::call_site())); + type_param.bounds.push(trait_param_bound(vec!["Clone"])); + type_param.bounds.push(trait_param_bound(vec!["PartialEq"])); + type_param.bounds.push(trait_param_bound(vec!["Default"])); + type_param.bounds.push(trait_param_bound(vec!["Send"])); + type_param.bounds.push(trait_param_bound(vec!["Sync"])); + type_param + .bounds + .push(trait_param_bound(vec!["prost", "Message"])); + type_param + .bounds + .push(trait_param_bound(vec!["serde", "Serialize"])); + type_param + .bounds + .push(trait_param_bound(vec!["serde", "de", "DeserializeOwned"])); + + type_param +} + +pub fn gen_type_param(name: &str) -> TypeParam { + let mut type_param = gen_unnamed_param(name); + type_param + .bounds + .push(trait_param_bound(vec!["prost", "Name"])); + type_param +} diff --git a/proto-build/src/parser/utils/is_important.rs b/proto-build/src/parser/utils/is_important.rs new file mode 100644 index 0000000..06a7cb0 --- /dev/null +++ b/proto-build/src/parser/utils/is_important.rs @@ -0,0 +1,40 @@ +use crate::parser::utils::common::type_as_path; +use syn::{Field, GenericArgument, PathArguments, TypePath}; + +pub enum FoundEnclosure { + Option, + Vec, +} + +// The types we're going to be modifying only appear enclosed in Options and Vecs +pub fn is_important(field: &mut Field) -> Option<(FoundEnclosure, &mut TypePath)> { + let path = type_as_path(&mut field.ty).unwrap(); + // Get the last segment since the rest is a path + let field_type = path.path.segments.iter_mut().last().unwrap(); + + let field_type_ident = field_type.ident.to_string(); + let found = if &field_type_ident == "Option" { + Some(FoundEnclosure::Option) + } else if &field_type_ident == "Vec" { + Some(FoundEnclosure::Vec) + } else { + None + }; + + // If we found 
the types we needed + if let Some(enclosed) = found { + if let PathArguments::AngleBracketed(bracket) = &mut field_type.arguments { + for p in bracket.args.iter_mut() { + if let GenericArgument::Type(t) = p { + if let Some(any_path) = type_as_path(t) { + return Some((enclosed, any_path)); + } else { + println!("something else") + } + } + } + } + } + + None +} diff --git a/proto-build/src/parser/utils/mod.rs b/proto-build/src/parser/utils/mod.rs new file mode 100644 index 0000000..2b2b2c7 --- /dev/null +++ b/proto-build/src/parser/utils/mod.rs @@ -0,0 +1,3 @@ +pub mod common; +pub mod gen_type_param; +pub mod is_important; diff --git a/proto-build/src/utils/mod.rs b/proto-build/src/utils/mod.rs new file mode 100644 index 0000000..ef849f9 --- /dev/null +++ b/proto-build/src/utils/mod.rs @@ -0,0 +1,2 @@ +pub mod patch_file; +pub mod run; diff --git a/proto-build/src/utils/patch_file.rs b/proto-build/src/utils/patch_file.rs new file mode 100644 index 0000000..fd38e68 --- /dev/null +++ b/proto-build/src/utils/patch_file.rs @@ -0,0 +1,16 @@ +use regex::Regex; +use std::path::Path; +use std::{fs, io}; + +pub fn patch_file(path: &Path, replacements: &[(&str, &str)]) -> io::Result<()> { + let mut contents = fs::read_to_string(path)?; + + for &(regex, replacement) in replacements { + contents = Regex::new(regex) + .unwrap_or_else(|_| panic!("invalid regex: {}", regex)) + .replace_all(&contents, replacement) + .to_string(); + } + + fs::write(path, &contents) +} diff --git a/proto-build/src/utils/run.rs b/proto-build/src/utils/run.rs new file mode 100644 index 0000000..f8e6247 --- /dev/null +++ b/proto-build/src/utils/run.rs @@ -0,0 +1,63 @@ +use std::ffi::OsStr; +use std::path::Path; +use std::{io, process}; + +pub fn run_git(args: impl IntoIterator>) -> crate::Result { + run_cmd("git", args) +} + +pub fn run_cargo(args: impl IntoIterator>) -> crate::Result { + run_cmd(env!("CARGO"), args) +} + +pub fn run_buf_export( + submodules_dir: &Path, + proto: &str, + export_dir: 
&Path, +) -> crate::Result { + println!("Exporting {}...", proto); + let proto_path = submodules_dir.join(proto).join("proto"); + run_cmd( + "buf", + [ + "export", + "-o", + &export_dir.display().to_string(), + proto_path.to_str().unwrap(), + ], + ) +} + +pub fn run_cmd( + cmd: impl AsRef, + args: impl IntoIterator>, +) -> crate::Result { + let process::Output { + stdout, + stderr, + status, + } = process::Command::new(&cmd) + .args(args) + .output() + .unwrap_or_else(|e| match e.kind() { + io::ErrorKind::NotFound => panic!( + "error running '{:?}': command not found. Is it installed?", + cmd.as_ref() + ), + _ => panic!("error running '{:?}': {:?}", cmd.as_ref(), e), + }); + + let output = std::str::from_utf8(&stdout)?.trim(); + if !status.success() { + let error = std::str::from_utf8(&stderr)?.trim(); + panic!( + "{:?} exited with error code: {:?}\nstdout: {:?}\nstderr: {:?}", + cmd.as_ref(), + status.code().unwrap_or(-1), + output, + error + ); + } + + Ok(output.to_string()) +}