diff --git a/Cargo.lock b/Cargo.lock index 0eb4f80d..75bf4262 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2282,7 +2282,7 @@ dependencies = [ [[package]] name = "kit" -version = "3.0.1" +version = "3.0.2" dependencies = [ "alloy", "alloy-sol-macro", @@ -2307,6 +2307,7 @@ dependencies = [ "serde", "serde_json", "sha2", + "sha3", "syn 2.0.90", "thiserror 1.0.63", "tokio", diff --git a/Cargo.toml b/Cargo.toml index 9b04a881..3d59a2f4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "kit" authors = ["Sybil Technologies AG"] -version = "3.0.1" +version = "3.0.2" edition = "2021" description = "Development toolkit for Hyperware" homepage = "https://hyperware.ai" @@ -52,6 +52,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10.8" +sha3 = "0.10.8" syn = { version = "2.0", features = ["full", "visit", "parsing", "extra-traits"] } thiserror = "1.0" tokio = { version = "1.28", features = [ diff --git a/src/build/caller_utils_generator.rs b/src/build/caller_utils_generator.rs index d882f9ab..a45755cf 100644 --- a/src/build/caller_utils_generator.rs +++ b/src/build/caller_utils_generator.rs @@ -142,50 +142,6 @@ fn wit_type_to_rust(wit_type: &str) -> String { } } -// Generate default value for Rust type - IMPROVED with additional types -fn generate_default_value(rust_type: &str) -> String { - match rust_type { - // Integer types - "i8" | "u8" | "i16" | "u16" | "i32" | "u32" | "i64" | "u64" | "isize" | "usize" => { - "0".to_string() - } - // Floating point types - "f32" | "f64" => "0.0".to_string(), - // String types - "String" => "String::new()".to_string(), - "&str" => "\"\"".to_string(), - // Other primitive types - "bool" => "false".to_string(), - "char" => "'\\0'".to_string(), - "()" => "()".to_string(), - // Collection types - t if t.starts_with("Vec<") => "Vec::new()".to_string(), - t if t.starts_with("Option<") => "None".to_string(), - t if t.starts_with("Result<") => { - // For Result, default to 
Ok with the default value of the success type - if let Some(success_type_end) = t.find(',') { let success_type = &t[7..success_type_end]; format!("Ok({})", generate_default_value(success_type)) } else { "Ok(())".to_string() } } - //t if t.starts_with("HashMap<") => "HashMap::new()".to_string(), - t if t.starts_with("(") => { - // Generate default tuple with default values for each element - let inner_part = t.trim_start_matches('(').trim_end_matches(')'); - let parts: Vec<_> = inner_part.split(", ").collect(); - let default_values: Vec<_> = parts - .iter() - .map(|part| generate_default_value(part)) - .collect(); - format!("({})", default_values.join(", ")) - } - // For custom types, assume they implement Default - _ => format!("{}::default()", rust_type), - } -} - // Structure to represent a field in a WIT signature struct #[derive(Debug)] struct SignatureField { @@ -346,7 +302,7 @@ fn parse_wit_file(file_path: &Path) -> Result<(Vec, Vec } // Generate a Rust async function from a signature struct -fn generate_async_function(signature: &SignatureStruct) -> String { +fn generate_async_function(signature: &SignatureStruct) -> Option<String> { // Convert function name from kebab-case to snake_case let snake_function_name = to_snake_case(&signature.function_name); @@ -405,62 +361,14 @@ fn generate_async_function(signature: &SignatureStruct) -> String { // For HTTP endpoints, generate commented-out implementation if signature.attr_type == "http" { - debug!("Generating commented-out stub for HTTP endpoint"); - let default_value = generate_default_value(&return_type); - - // Add underscore prefix to all parameters for HTTP stubs - let all_params_with_underscore = if target_param.is_empty() { - params - .iter() - .map(|param| { - let parts: Vec<&str> = param.split(':').collect(); - if parts.len() == 2 { - format!("_{}: {}", parts[0], parts[1]) - } else { - warn!(param = %param, "Could not parse parameter for underscore prefix"); - format!("_{}", param) - } - }) - 
.collect::>() - .join(", ") - } else { - let target_with_underscore = format!("_target: {}", target_param); - if params.is_empty() { - target_with_underscore - } else { - let params_with_underscore = params - .iter() - .map(|param| { - let parts: Vec<&str> = param.split(':').collect(); - if parts.len() == 2 { - format!("_{}: {}", parts[0], parts[1]) - } else { - warn!(param = %param, "Could not parse parameter for underscore prefix"); - format!("_{}", param) - } - }) - .collect::>() - .join(", "); - format!("{}, {}", target_with_underscore, params_with_underscore) - } - }; - - return format!( - "// /// Generated stub for `{}` {} RPC call\n// /// HTTP endpoint - uncomment to implement\n// pub async fn {}({}) -> {} {{\n// // TODO: Implement HTTP endpoint\n// Ok({})\n// }}", - signature.function_name, - signature.attr_type, - full_function_name, - all_params_with_underscore, - wrapped_return_type, - default_value - ); + return None; } // Format JSON parameters correctly let json_params = if param_names.is_empty() { // No parameters case debug!("Generating JSON with no parameters"); - format!("json!({{\"{}\" : {{}}}})", pascal_function_name) + format!("json!({{\"{}\" : null}})", pascal_function_name) } else if param_names.len() == 1 { // Single parameter case debug!(param = %param_names[0], "Generating JSON with single parameter"); @@ -480,7 +388,7 @@ fn generate_async_function(signature: &SignatureStruct) -> String { // Generate function with implementation using send debug!("Generating standard RPC stub implementation"); - format!( + Some(format!( "/// Generated stub for `{}` {} RPC call\npub async fn {}({}) -> {} {{\n let body = {};\n let body = serde_json::to_vec(&body).unwrap();\n let request = Request::to(target)\n .body(body);\n send::<{}>(request).await\n}}", signature.function_name, signature.attr_type, @@ -489,7 +397,7 @@ fn generate_async_function(signature: &SignatureStruct) -> String { wrapped_return_type, json_params, return_type - ) + )) } // Create the 
caller-utils crate with a single lib.rs file @@ -507,9 +415,9 @@ fn create_caller_utils_crate(api_dir: &Path, base_dir: &Path) -> Result<()> { fs::create_dir_all(caller_utils_dir.join("src"))?; debug!("Created project directory structure"); - // Get hyperware_app_common dependency from the process's Cargo.toml - let hyperware_dep = get_hyperware_app_common_dependency(base_dir)?; - debug!("Got hyperware_app_common dependency: {}", hyperware_dep); + // Get hyperware_process_lib dependency from the process's Cargo.toml + let hyperware_dep = get_hyperware_process_lib_dependency(base_dir)?; + debug!("Got hyperware_process_lib dependency: {}", hyperware_dep); // Create Cargo.toml with updated dependencies let cargo_toml = format!( @@ -525,7 +433,7 @@ process_macros = "0.1.0" futures-util = "0.3" serde = {{ version = "1.0", features = ["derive"] }} serde_json = "1.0" -hyperware_app_common = {} +hyperware_process_lib = {} once_cell = "1.20.2" futures = "0.3" uuid = {{ version = "1.0" }} @@ -621,9 +529,10 @@ crate-type = ["cdylib", "lib"] // Add function implementations for signature in &signatures { - let function_impl = generate_async_function(signature); - mod_content.push_str(&function_impl); - mod_content.push_str("\n\n"); + if let Some(function_impl) = generate_async_function(signature) { + mod_content.push_str(&function_impl); + mod_content.push_str("\n\n"); + } } // Store the module content @@ -672,9 +581,8 @@ crate-type = ["cdylib", "lib"] lib_rs.push_str("/// Generated caller utilities for RPC function stubs\n\n"); // Add global imports - lib_rs.push_str("pub use hyperware_app_common::AppSendError;\n"); - lib_rs.push_str("pub use hyperware_app_common::send;\n"); - lib_rs.push_str("use hyperware_app_common::hyperware_process_lib as hyperware_process_lib;\n"); + lib_rs.push_str("pub use hyperware_process_lib::hyperapp::AppSendError;\n"); + lib_rs.push_str("pub use hyperware_process_lib::hyperapp::send;\n"); lib_rs.push_str("pub use hyperware_process_lib::{Address, 
Request};\n"); lib_rs.push_str("use serde_json::json;\n\n"); @@ -799,9 +707,9 @@ fn read_cargo_toml(path: &Path) -> Result { .with_context(|| format!("Failed to parse Cargo.toml: {}", path.display())) } -// Get hyperware_app_common dependency from the process Cargo.toml files +// Get hyperware_process_lib dependency from the process Cargo.toml files #[instrument(level = "trace", skip_all)] -fn get_hyperware_app_common_dependency(base_dir: &Path) -> Result<String> { +fn get_hyperware_process_lib_dependency(base_dir: &Path) -> Result<String> { const DEFAULT_DEP: &str = r#"{ git = "https://github.com/hyperware-ai/hyperprocess-macro", rev = "4c944b2" }"#; @@ -813,7 +721,7 @@ fn get_hyperware_app_common_dependency(base_dir: &Path) -> Result { .and_then(|m| m.as_array()) .ok_or_else(|| eyre!("No workspace.members found in Cargo.toml"))?; - // Collect hyperware_app_common dependencies from all process members + // Collect hyperware_process_lib dependencies from all process members let mut found_deps = HashMap::new(); for member in members.iter().filter_map(|m| m.as_str()) { @@ -835,10 +743,10 @@ fn get_hyperware_app_common_dependency(base_dir: &Path) -> Result { if let Some(dep) = member_toml .get("dependencies") - .and_then(|d| d.get("hyperware_app_common")) + .and_then(|d| d.get("hyperware_process_lib")) .and_then(format_toml_dependency) { - debug!("Found hyperware_app_common in {}: {}", member, dep); + debug!("Found hyperware_process_lib in {}: {}", member, dep); found_deps.insert(member.to_string(), dep); } } @@ -846,12 +754,12 @@ fn get_hyperware_app_common_dependency(base_dir: &Path) -> Result { // Handle results match found_deps.len() { 0 => { - warn!("No hyperware_app_common dependencies found in any process, using default"); + warn!("No hyperware_process_lib dependencies found in any process, using default"); Ok(DEFAULT_DEP.to_string()) } 1 => { let dep = found_deps.values().next().unwrap(); - info!("Using hyperware_app_common dependency: {}", dep); + info!("Using 
hyperware_process_lib dependency: {}", dep); Ok(dep.clone()) } _ => { @@ -865,13 +773,13 @@ fn get_hyperware_app_common_dependency(base_dir: &Path) -> Result { found_deps.iter().find(|(_, d)| *d == first_dep).unwrap(); let (conflict_process, _) = found_deps.iter().find(|(_, d)| *d == dep).unwrap(); bail!( - "Conflicting hyperware_app_common versions found:\n  Process '{}': {}\n  Process '{}': {}\nAll processes must use the same version.", + "Conflicting hyperware_process_lib versions found:\n  Process '{}': {}\n  Process '{}': {}\nAll processes must use the same version.", first_process, first_dep, conflict_process, dep ); } } - info!("Using hyperware_app_common dependency: {}", first_dep); + info!("Using hyperware_process_lib dependency: {}", first_dep); Ok(first_dep.clone()) } } @@ -913,10 +821,10 @@ fn update_workspace_cargo_toml(base_dir: &Path) -> Result<()> { // Check if caller-utils is already in the members list let caller_utils_exists = members_array .iter() - .any(|m| m.as_str().map_or(false, |s| s == "target/caller-utils")); + .any(|m| m.as_str().map_or(false, |s| s == "target/caller-utils")); if !caller_utils_exists { - members_array.push(Value::String("target/caller-utils".to_string())); + members_array.push(Value::String("target/caller-utils".to_string())); // Write back the updated TOML let updated_content = toml::to_string_pretty(&parsed_toml) @@ -932,7 +840,7 @@ fn update_workspace_cargo_toml(base_dir: &Path) -> Result<()> { debug!("Successfully updated workspace Cargo.toml"); } else { debug!( - "Workspace Cargo.toml already up-to-date regarding caller-utils member." + "Workspace Cargo.toml already up-to-date regarding caller-utils member." 
); } } @@ -979,6 +887,7 @@ pub fn add_caller_utils_to_projects(projects: &[PathBuf]) -> Result<()> { "path".to_string(), Value::String("../target/caller-utils".to_string()), ); + t.insert("optional".to_string(), Value::Boolean(true)); t }), ); diff --git a/src/build/caller_utils_ts_generator.rs b/src/build/caller_utils_ts_generator.rs index d72ab994..bce2418f 100644 --- a/src/build/caller_utils_ts_generator.rs +++ b/src/build/caller_utils_ts_generator.rs @@ -6,29 +6,24 @@ use tracing::{debug, info, instrument, warn}; use walkdir::WalkDir; -// Convert kebab-case to camelCase -pub fn to_camel_case(s: &str) -> String { - let parts: Vec<&str> = s.split('-').collect(); - if parts.is_empty() { - return String::new(); - } +// Strip % prefix from WIT identifiers (used to escape keywords) +fn strip_wit_escape(s: &str) -> &str { + s.strip_prefix('%').unwrap_or(s) +} - let mut result = parts[0].to_string(); - for part in &parts[1..] { - if !part.is_empty() { - let mut chars = part.chars(); - if let Some(first_char) = chars.next() { - result.push(first_char.to_uppercase().next().unwrap()); - result.extend(chars); - } - } - } +// Convert kebab-case to snake_case +pub fn to_snake_case(s: &str) -> String { + // Strip % prefix if present + let s = strip_wit_escape(s); - result + s.chars().map(|c| if c == '-' { '_' } else { c }).collect() } // Convert kebab-case to PascalCase pub fn to_pascal_case(s: &str) -> String { + // Strip % prefix if present + let s = strip_wit_escape(s); + let parts = s.split('-'); let mut result = String::new(); @@ -74,9 +69,25 @@ fn wit_type_to_typescript(wit_type: &str) -> String { } t if t.starts_with("result<") => { let inner_part = &t[7..t.len() - 1]; - if let Some(comma_pos) = inner_part.find(',') { - let ok_type = &inner_part[..comma_pos].trim(); - let err_type = &inner_part[comma_pos + 1..].trim(); + // Find the comma that separates Ok and Err types, handling nested generics + let mut depth = 0; + let mut comma_pos = None; + + for (i, ch) in 
inner_part.chars().enumerate() { + match ch { + '<' => depth += 1, + '>' => depth -= 1, + ',' if depth == 0 => { + comma_pos = Some(i); + break; + } + _ => {} + } + } + + if let Some(pos) = comma_pos { + let ok_type = inner_part[..pos].trim(); + let err_type = inner_part[pos + 1..].trim(); format!( "{{ Ok: {} }} | {{ Err: {} }}", wit_type_to_typescript(ok_type), @@ -91,10 +102,38 @@ fn wit_type_to_typescript(wit_type: &str) -> String { } t if t.starts_with("tuple<") => { let inner_types = &t[6..t.len() - 1]; - let ts_types: Vec = inner_types - .split(", ") - .map(|t| wit_type_to_typescript(t)) - .collect(); + // Parse tuple elements correctly, handling nested generics + let mut elements = Vec::new(); + let mut current = String::new(); + let mut depth = 0; + + for ch in inner_types.chars() { + match ch { + '<' => { + depth += 1; + current.push(ch); + } + '>' => { + depth -= 1; + current.push(ch); + } + ',' if depth == 0 => { + // Only split on commas at the top level + elements.push(current.trim().to_string()); + current.clear(); + } + _ => { + current.push(ch); + } + } + } + // Don't forget the last element + if !current.trim().is_empty() { + elements.push(current.trim().to_string()); + } + + let ts_types: Vec = + elements.iter().map(|t| wit_type_to_typescript(t)).collect(); format!("[{}]", ts_types.join(", ")) } // Custom types (in kebab-case) need to be converted to PascalCase @@ -106,8 +145,24 @@ fn wit_type_to_typescript(wit_type: &str) -> String { fn extract_result_ok_type(wit_type: &str) -> Option { if wit_type.starts_with("result<") { let inner_part = &wit_type[7..wit_type.len() - 1]; - if let Some(comma_pos) = inner_part.find(',') { - let ok_type = inner_part[..comma_pos].trim(); + // Find the comma that separates Ok and Err types, handling nested generics + let mut depth = 0; + let mut comma_pos = None; + + for (i, ch) in inner_part.chars().enumerate() { + match ch { + '<' => depth += 1, + '>' => depth -= 1, + ',' if depth == 0 => { + comma_pos = Some(i); 
+ break; + } + _ => {} + } + } + + if let Some(pos) = comma_pos { + let ok_type = inner_part[..pos].trim(); Some(wit_type_to_typescript(ok_type)) } else { // Result with no error type @@ -180,6 +235,9 @@ fn parse_wit_file(file_path: &Path) -> Result { .trim_end_matches(" {") .trim(); + // Strip % prefix if present + let record_name = strip_wit_escape(record_name); + if record_name.contains("-signature-") { // This is a signature record debug!(name = %record_name, "Found signature record"); @@ -212,7 +270,7 @@ fn parse_wit_file(file_path: &Path) -> Result { // Parse field definition let field_parts: Vec<_> = field_line.split(':').collect(); if field_parts.len() == 2 { - let field_name = field_parts[0].trim().to_string(); + let field_name = strip_wit_escape(field_parts[0].trim()).to_string(); let field_type = field_parts[1].trim().trim_end_matches(',').to_string(); debug!(name = %field_name, wit_type = %field_type, "Found field"); @@ -250,7 +308,7 @@ fn parse_wit_file(file_path: &Path) -> Result { // Parse field definition let field_parts: Vec<_> = field_line.split(':').collect(); if field_parts.len() == 2 { - let field_name = field_parts[0].trim().to_string(); + let field_name = strip_wit_escape(field_parts[0].trim()).to_string(); let field_type = field_parts[1].trim().trim_end_matches(',').to_string(); debug!(name = %field_name, wit_type = %field_type, "Found field"); @@ -275,6 +333,9 @@ fn parse_wit_file(file_path: &Path) -> Result { .trim_start_matches("variant ") .trim_end_matches(" {") .trim(); + + // Strip % prefix if present + let variant_name = strip_wit_escape(variant_name); debug!(name = %variant_name, "Found variant"); // Parse cases @@ -291,7 +352,13 @@ fn parse_wit_file(file_path: &Path) -> Result { } // Parse case - just the name, ignoring any associated data for now - let case_name = case_line.trim_end_matches(',').to_string(); + let case_raw = case_line.trim_end_matches(','); + // Extract case name (might have associated type in parentheses) + let 
case_name = if let Some(paren_pos) = case_raw.find('(') { + strip_wit_escape(&case_raw[..paren_pos]).to_string() + } else { + strip_wit_escape(case_raw).to_string() + }; debug!(case = %case_name, "Found variant case"); cases.push(case_name); @@ -327,7 +394,7 @@ fn generate_typescript_interface(record: &WitRecord) -> String { let mut fields = Vec::new(); for field in &record.fields { - let field_name = to_camel_case(&field.name); + let field_name = to_snake_case(&field.name); let ts_type = wit_type_to_typescript(&field.wit_type); fields.push(format!(" {}: {};", field_name, ts_type)); } @@ -354,7 +421,7 @@ fn generate_typescript_variant(variant: &WitVariant) -> String { // Generate TypeScript interface and function from a signature struct fn generate_typescript_function(signature: &SignatureStruct) -> (String, String, String) { // Convert function name from kebab-case to camelCase - let camel_function_name = to_camel_case(&signature.function_name); + let camel_function_name = to_snake_case(&signature.function_name); let pascal_function_name = to_pascal_case(&signature.function_name); debug!(name = %camel_function_name, "Generating TypeScript function"); @@ -367,7 +434,7 @@ fn generate_typescript_function(signature: &SignatureStruct) -> (String, String, let mut unwrapped_return_type = "void".to_string(); for field in &signature.fields { - let field_name_camel = to_camel_case(&field.name); + let field_name_camel = to_snake_case(&field.name); let ts_type = wit_type_to_typescript(&field.wit_type); debug!(field = %field.name, wit_type = %field.wit_type, ts_type = %ts_type, "Processing field"); @@ -590,7 +657,7 @@ pub fn create_typescript_caller_utils(base_dir: &Path, api_dir: &Path) -> Result all_interfaces.push(interface_def); all_types.push(type_def); all_functions.push(function_def); - function_names.push(to_camel_case(&signature.function_name)); + function_names.push(to_snake_case(&signature.function_name)); } } } diff --git a/src/build/mod.rs b/src/build/mod.rs index 
43a6f44e..6d98ffe1 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -153,15 +153,39 @@ pub fn has_feature(cargo_toml_path: &str, feature: &str) -> Result { pub fn remove_missing_features(cargo_toml_path: &Path, features: Vec<&str>) -> Result<Vec<String>> { let cargo_toml_content = fs::read_to_string(cargo_toml_path)?; let cargo_toml: toml::Value = cargo_toml_content.parse()?; - let Some(cargo_features) = cargo_toml.get("features").and_then(|f| f.as_table()) else { - return Ok(vec![]); - }; + let cargo_features = cargo_toml.get("features").and_then(|f| f.as_table()); + + // Check for optional dependencies which implicitly create features + let optional_deps: HashSet<String> = + if let Some(dependencies) = cargo_toml.get("dependencies").and_then(|d| d.as_table()) { + dependencies + .iter() + .filter_map(|(name, dep)| { + // Check if this dependency is marked as optional + if let Some(dep_table) = dep.as_table() { + if dep_table + .get("optional") + .and_then(|o| o.as_bool()) + .unwrap_or(false) + { + return Some(name.clone()); + } + } + None + }) + .collect() + } else { + HashSet::new() + }; Ok(features .iter() .filter_map(|f| { let f = f.to_string(); - if cargo_features.contains_key(&f) { + // Check if it's an explicit feature or an optional dependency + if (cargo_features.is_some() && cargo_features.unwrap().contains_key(&f)) + || optional_deps.contains(&f) + { Some(f) } else { None @@ -1741,6 +1765,7 @@ pub async fn execute( verbose={verbose}, ignore_deps={ignore_deps}," ); + let package_dir = fs::canonicalize(package_dir)?; if no_ui && ui_only { return Err(eyre!( "Cannot set both `no_ui` and `ui_only` to true at the same time" )); } @@ -1766,7 +1791,7 @@ pub async fn execute( &build_with_cludes_path, features, &cludes, - package_dir, + &package_dir, )? 
{ return Ok(()); @@ -1804,9 +1829,9 @@ pub async fn execute( // if `!rewrite`, that is just `package_dir`; // else, it is the modified copy that is in `target/rewrite/` let live_dir = if !rewrite { - PathBuf::from(package_dir) + PathBuf::from(&package_dir) } else { - copy_and_rewrite_package(package_dir)? + copy_and_rewrite_package(&package_dir)? }; let hyperapp_processed_projects = if !hyperapp { @@ -1868,9 +1893,9 @@ pub async fn execute( copy_dir(live_dir.join("pkg"), package_dir.join("pkg"))?; } - let metadata = read_metadata(package_dir)?; + let metadata = read_metadata(&package_dir)?; let pkg_publisher = make_pkg_publisher(&metadata); - let (_zip_filename, hash_string) = zip_pkg(package_dir, &pkg_publisher)?; + let (_zip_filename, hash_string) = zip_pkg(&package_dir, &pkg_publisher)?; info!("package zip hash: {hash_string}"); Ok(()) diff --git a/src/build/wit_generator.rs b/src/build/wit_generator.rs index 15ead1b4..2a879750 100644 --- a/src/build/wit_generator.rs +++ b/src/build/wit_generator.rs @@ -11,6 +11,53 @@ use toml::Value; use tracing::{debug, info, instrument, warn}; use walkdir::WalkDir; +// List of WIT keywords that need to be prefixed with % +fn is_wit_keyword(s: &str) -> bool { + matches!( + s, + "use" + | "type" + | "resource" + | "func" + | "record" + | "enum" + | "flags" + | "variant" + | "static" + | "interface" + | "world" + | "import" + | "export" + | "package" + | "constructor" + | "include" + | "with" + | "as" + | "from" + | "list" + | "option" + | "result" + | "tuple" + | "future" + | "stream" + | "own" + | "borrow" + | "u8" + | "u16" + | "u32" + | "u64" + | "s8" + | "s16" + | "s32" + | "s64" + | "f32" + | "f64" + | "char" + | "bool" + | "string" + ) +} + // Helper functions for naming conventions fn to_kebab_case(s: &str) -> String { // First, handle the case where the input has underscores @@ -42,6 +89,15 @@ fn to_kebab_case(s: &str) -> String { result } +// Convert a name to valid WIT identifier, prefixing with % if it's a keyword 
+fn to_wit_ident(kebab_name: &str) -> String { + if is_wit_keyword(kebab_name) { + format!("%{}", kebab_name) + } else { + kebab_name.to_string() + } +} + // Validates a name doesn't contain numbers or "stream" fn validate_name(name: &str, kind: &str) -> Result<()> { // Check for numbers @@ -139,41 +195,6 @@ fn is_wit_primitive_or_builtin(type_name: &str) -> bool { || type_name.starts_with("tuple<") } -// Extract custom type names from a WIT type string (e.g., "list" -> ["foo-bar"]) -fn extract_custom_types_from_wit_type(wit_type: &str) -> Vec { - let mut custom_types = Vec::new(); - - // Skip if it's a primitive type - if is_wit_primitive_or_builtin(wit_type) && !wit_type.contains('<') { - return custom_types; - } - - // Handle composite types like list, option, result, tuple - if let Some(start) = wit_type.find('<') { - if let Some(end) = wit_type.rfind('>') { - let inner = &wit_type[start + 1..end]; - - // Split by comma to handle multiple type parameters - for part in inner.split(',') { - let trimmed = part.trim(); - if !trimmed.is_empty() && trimmed != "_" && !is_wit_primitive_or_builtin(trimmed) { - // Recursively extract from nested types - if trimmed.contains('<') { - custom_types.extend(extract_custom_types_from_wit_type(trimmed)); - } else { - custom_types.push(trimmed.to_string()); - } - } - } - } - } else if !is_wit_primitive_or_builtin(wit_type) { - // It's a non-composite custom type - custom_types.push(wit_type.to_string()); - } - - custom_types -} - // Convert Rust type to WIT type, including downstream types #[instrument(level = "trace", skip_all)] fn rust_type_to_wit(ty: &Type, used_types: &mut HashSet) -> Result { @@ -361,189 +382,6 @@ fn find_rust_files(crate_path: &Path) -> Vec { rust_files } -// Searches a single file for a specific type definition (struct or enum) by its kebab-case name. -// If found, generates its WIT definition string and returns it along with any new custom type -// dependencies discovered within its fields/variants. 
-#[instrument(level = "trace", skip_all)] -fn find_and_make_wit_type_def( - file_path: &Path, - target_kebab_type_name: &str, - global_used_types: &mut HashSet, // Track all used types globally -) -> Result)>> { - // Return: Ok(Some((wit_def, new_local_deps))), Ok(None), or Err - debug!( - file_path = %file_path.display(), - target_type = %target_kebab_type_name, - "Searching for type definition" - ); - - let content = fs::read_to_string(file_path) - .with_context(|| format!("Failed to read file: {}", file_path.display()))?; - - let ast = syn::parse_file(&content) - .with_context(|| format!("Failed to parse file: {}", file_path.display()))?; - - for item in &ast.items { - // Determine if the current item matches the target type name - let (is_target, item_kind, orig_name) = match item { - Item::Struct(s) => { - let name = s.ident.to_string(); - ( - to_kebab_case(&name) == target_kebab_type_name, - "Struct", - name, - ) - } - Item::Enum(e) => { - let name = e.ident.to_string(); - (to_kebab_case(&name) == target_kebab_type_name, "Enum", name) - } - _ => (false, "", String::new()), - }; - - if is_target { - // Skip internal-looking types (can be adjusted) - if orig_name.contains("__") { - warn!(name = %orig_name, "Skipping definition search for likely internal type"); - return Ok(None); // Treat as not found for WIT purposes - } - // Validate the original Rust name - validate_name(&orig_name, item_kind)?; - - let kebab_name = target_kebab_type_name; // We know this matches - let mut local_dependencies = HashSet::new(); // Track deps discovered *by this type* - - // --- Generate Struct Definition --- - if let Item::Struct(item_struct) = item { - let fields_result: Result> = match &item_struct.fields { - syn::Fields::Named(fields) => { - let mut field_strings = Vec::new(); - for f in &fields.named { - if let Some(field_ident) = &f.ident { - let field_orig_name = field_ident.to_string(); - // Validate field name (allow underscore stripping) - let stripped_field_orig_name 
= - check_and_strip_leading_underscore(field_orig_name.clone()); - // Validate the potentially stripped name, adding context about the rules - validate_name(&stripped_field_orig_name, "Field")?; - - let field_kebab_name = to_kebab_case(&stripped_field_orig_name); - if field_kebab_name.is_empty() { - warn!(struct_name=%kebab_name, field_original_name=%field_orig_name, "Skipping field with empty kebab-case name"); - continue; - } - - // Convert field type. `rust_type_to_wit` adds any new custom types - // found within the field type (e.g., in list) to `global_used_types`. - let field_wit_type = rust_type_to_wit(&f.ty, global_used_types) - .wrap_err_with(|| format!("Failed to convert field '{}':'{:?}' in struct '{}'", field_orig_name, f.ty, orig_name))?; - - // Extract any custom types from the field type and add them to local dependencies - // For example, from "list" we extract "participant-info" - for custom_type in extract_custom_types_from_wit_type(&field_wit_type) { - local_dependencies.insert(custom_type); - } - - field_strings.push(format!(" {}: {}", field_kebab_name, field_wit_type)); - } - } - Ok(field_strings) - } - // Handle Unit Structs as empty records - syn::Fields::Unit => Ok(Vec::new()), - // Decide how to handle Tuple Structs (e.g., error, skip, specific WIT representation?) 
- syn::Fields::Unnamed(_) => bail!("Tuple structs ('struct {} (...)') are not currently supported for WIT generation.", orig_name), - }; - - match fields_result { - Ok(fields_vec) => { - // Generate record definition (use {} for empty records) - let definition = if fields_vec.is_empty() { - format!(" record {} {{}}", kebab_name) - } else { - format!( - " record {} {{\n{}\n }}", - kebab_name, - fields_vec.join(",\n") - ) - }; - debug!(type_name = %kebab_name, "Generated record definition"); - return Ok(Some((definition, local_dependencies))); - } - Err(e) => return Err(e), // Propagate field processing error - } - } - - // --- Generate Enum Definition --- - if let Item::Enum(item_enum) = item { - let mut variants_wit = Vec::new(); - let mut skip_enum = false; - - for v in &item_enum.variants { - let variant_orig_name = v.ident.to_string(); - // Validate variant name before proceeding - validate_name(&variant_orig_name, "Enum variant")?; - let variant_kebab_name = to_kebab_case(&variant_orig_name); - - match &v.fields { - // Variant with one unnamed field: T -> case(T) - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => { - // `rust_type_to_wit` adds new custom types to `global_used_types` - let type_result = rust_type_to_wit( - &fields.unnamed.first().unwrap().ty, - global_used_types, - ) - .wrap_err_with(|| { - format!( - "Failed to convert variant '{}' type in enum '{}'", - variant_orig_name, orig_name - ) - })?; - - // Extract any custom types from the variant type and add them to local dependencies - for custom_type in extract_custom_types_from_wit_type(&type_result) { - local_dependencies.insert(custom_type); - } - variants_wit - .push(format!(" {}({})", variant_kebab_name, type_result)); - } - // Unit variant: -> case - syn::Fields::Unit => { - variants_wit.push(format!(" {}", variant_kebab_name)); - } - // Variants with named fields or multiple unnamed fields are not directly supported by WIT variants - _ => { - warn!(enum_name = %kebab_name, 
variant_name = %variant_orig_name, "Skipping complex enum variant (only unit variants or single-type variants like 'MyVariant(MyType)' are supported)"); - skip_enum = true; - break; // Skip the whole enum if one variant is complex - } - } - } - - // Only generate if not skipped and has convertible variants - if !skip_enum && !variants_wit.is_empty() { - let definition = format!( - " variant {} {{\n{}\n }}", - kebab_name, - variants_wit.join(",\n") - ); - debug!(type_name = %kebab_name, "Generated variant definition"); - return Ok(Some((definition, local_dependencies))); - } else { - // Treat as not found for WIT generation if skipped or empty - warn!(name = %kebab_name, "Skipping enum definition due to complex/invalid variants or no convertible variants"); - return Ok(None); - } - } - // Should not be reached if item is Struct or Enum and is_target is true - unreachable!("Target type matched but was neither struct nor enum?"); - } - } - - // Target type definition was not found in this specific file - Ok(None) -} - // Find all relevant Rust projects fn find_rust_projects(base_dir: &Path) -> Vec { let mut projects = Vec::new(); @@ -655,13 +493,14 @@ fn generate_signature_struct( let stripped_param_name = check_and_strip_leading_underscore(param_orig_name.clone()); // Clone needed let param_name = to_kebab_case(&stripped_param_name); + let param_wit_ident = to_wit_ident(¶m_name); // Rust type to WIT type match rust_type_to_wit(&pat_type.ty, used_types) { Ok(param_type) => { // Add field directly to the struct struct_fields - .push(format!(" {}: {}", param_name, param_type)); + .push(format!(" {}: {}", param_wit_ident, param_type)); } Err(e) => { // Wrap parameter type conversion error with context @@ -794,6 +633,182 @@ impl AsTypePath for syn::Type { } } +// Helper function to collect all type definitions from a file +#[instrument(level = "trace", skip_all)] +fn collect_type_definitions_from_file( + file_path: &Path, + type_definitions: &mut HashMap, // kebab-name -> 
WIT definition +) -> Result<()> { + debug!(file_path = %file_path.display(), "Collecting type definitions from file"); + + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read file: {}", file_path.display()))?; + + let ast = syn::parse_file(&content) + .with_context(|| format!("Failed to parse file: {}", file_path.display()))?; + + // Temporary HashSet for tracking dependencies during collection + let mut temp_used_types = HashSet::new(); + + for item in &ast.items { + match item { + Item::Struct(s) => { + let name = s.ident.to_string(); + // Skip internal types + if name.contains("__") { + continue; + } + + // Validate name + if let Err(e) = validate_name(&name, "Struct") { + warn!(name = %name, error = %e, "Skipping struct with invalid name"); + continue; + } + + let kebab_name = to_kebab_case(&name); + + // Generate WIT definition for this struct + let fields_result: Result> = match &s.fields { + syn::Fields::Named(fields) => { + let mut field_strings = Vec::new(); + for f in &fields.named { + if let Some(field_ident) = &f.ident { + let field_orig_name = field_ident.to_string(); + let stripped_field_orig_name = + check_and_strip_leading_underscore(field_orig_name.clone()); + + if let Err(e) = validate_name(&stripped_field_orig_name, "Field") { + warn!(field_name = %field_orig_name, error = %e, "Skipping field with invalid name"); + continue; + } + + let field_kebab_name = to_kebab_case(&stripped_field_orig_name); + if field_kebab_name.is_empty() { + continue; + } + + // Convert field type + match rust_type_to_wit(&f.ty, &mut temp_used_types) { + Ok(field_wit_type) => { + let field_wit_ident = to_wit_ident(&field_kebab_name); + field_strings.push(format!( + " {}: {}", + field_wit_ident, field_wit_type + )); + } + Err(e) => { + warn!(field = %field_orig_name, error = %e, "Failed to convert field type"); + return Err(e.wrap_err(format!( + "Failed to convert field '{}' in struct '{}'", + field_orig_name, name + ))); + } + } + } + } 
+ Ok(field_strings) + } + syn::Fields::Unit => Ok(Vec::new()), + syn::Fields::Unnamed(_) => { + warn!(struct_name = %name, "Skipping tuple struct"); + continue; + } + }; + + match fields_result { + Ok(fields_vec) => { + let wit_ident = to_wit_ident(&kebab_name); + let definition = if fields_vec.is_empty() { + format!(" record {} {{}}", wit_ident) + } else { + format!( + " record {} {{\n{}\n }}", + wit_ident, + fields_vec.join(",\n") + ) + }; + type_definitions.insert(kebab_name, definition); + } + Err(e) => { + warn!(struct_name = %name, error = %e, "Failed to process struct"); + } + } + } + Item::Enum(e) => { + let name = e.ident.to_string(); + // Skip internal types + if name.contains("__") { + continue; + } + + // Validate name + if let Err(e) = validate_name(&name, "Enum") { + warn!(name = %name, error = %e, "Skipping enum with invalid name"); + continue; + } + + let kebab_name = to_kebab_case(&name); + let mut variants_wit = Vec::new(); + let mut skip_enum = false; + + for v in &e.variants { + let variant_orig_name = v.ident.to_string(); + if let Err(e) = validate_name(&variant_orig_name, "Enum variant") { + warn!(variant = %variant_orig_name, error = %e, "Skipping variant with invalid name"); + skip_enum = true; + break; + } + + let variant_kebab_name = to_kebab_case(&variant_orig_name); + let variant_wit_ident = to_wit_ident(&variant_kebab_name); + + match &v.fields { + syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => { + match rust_type_to_wit( + &fields.unnamed.first().unwrap().ty, + &mut temp_used_types, + ) { + Ok(type_result) => { + variants_wit.push(format!( + " {}({})", + variant_wit_ident, type_result + )); + } + Err(e) => { + warn!(variant = %variant_orig_name, error = %e, "Failed to convert variant type"); + skip_enum = true; + break; + } + } + } + syn::Fields::Unit => { + variants_wit.push(format!(" {}", variant_wit_ident)); + } + _ => { + warn!(enum_name = %kebab_name, variant_name = %variant_orig_name, "Skipping complex enum 
variant"); + skip_enum = true; + break; + } + } + } + + if !skip_enum && !variants_wit.is_empty() { + let wit_ident = to_wit_ident(&kebab_name); + let definition = format!( + " variant {} {{\n{}\n }}", + wit_ident, + variants_wit.join(",\n") + ); + type_definitions.insert(kebab_name, definition); + } + } + _ => {} + } + } + + Ok(()) +} + // Process a single Rust project and generate WIT files #[instrument(level = "trace", skip_all)] fn process_rust_project(project_path: &Path, api_dir: &Path) -> Result> { @@ -823,7 +838,20 @@ fn process_rust_project(project_path: &Path, api_dir: &Path) -> Result Result Result Result Result Result WIT definition string - let mut types_to_find_queue: Vec = global_used_types // Initialize queue + // --- 3. Build dependency graph and topologically sort types --- + debug!("Building type dependency graph"); + + // Build a dependency map: type -> types it depends on + let mut type_dependencies: HashMap> = HashMap::new(); + let mut needed_types = HashSet::new(); + let mut to_process: Vec = global_used_types .iter() - .filter(|ty| !is_wit_primitive_or_builtin(ty)) // Only custom types + .filter(|ty| !is_wit_primitive_or_builtin(ty)) .cloned() .collect(); - let mut processed_types = HashSet::new(); // Track types processed to avoid cycles/redundancy - // Add primitives/builtins to processed_types initially - for ty in &global_used_types { - if is_wit_primitive_or_builtin(ty) { - processed_types.insert(ty.clone()); + // First pass: collect all needed types and their dependencies + while let Some(type_name) = to_process.pop() { + if needed_types.contains(&type_name) { + continue; } - } - while let Some(type_name_to_find) = types_to_find_queue.pop() { - if processed_types.contains(&type_name_to_find) { - continue; // Already processed or known primitive/builtin + // Check if we have a definition for this type + if let Some(wit_def) = all_type_definitions.get(&type_name) { + needed_types.insert(type_name.clone()); + let mut deps = Vec::new(); + 
+ // Extract nested type dependencies from the WIT definition + // Look for other custom types referenced in this definition + for (other_type_name, _) in &all_type_definitions { + if other_type_name != &type_name && wit_def.contains(other_type_name) { + deps.push(other_type_name.clone()); + if !needed_types.contains(other_type_name) + && !to_process.contains(other_type_name) + { + to_process.push(other_type_name.clone()); + } + } + } + + type_dependencies.insert(type_name.clone(), deps); } + } - debug!(type_name = %type_name_to_find, "Attempting to find definition"); - let mut definition_found_in_project = false; + // Topological sort using Kahn's algorithm + debug!("Performing topological sort of type definitions"); + let mut sorted_types = Vec::new(); + let mut in_degree: HashMap = HashMap::new(); - // Search across all project files for the definition - for file_path in &rust_files { - // Directly propagate errors from find_and_make_wit_type_def - match find_and_make_wit_type_def(file_path, &type_name_to_find, &mut global_used_types)? - { - Some((wit_definition, new_local_deps)) => { - debug!(type_name=%type_name_to_find, file_path=%file_path.display(), "Found definition"); + // Initialize in-degrees + for type_name in &needed_types { + in_degree.insert(type_name.clone(), 0); + } - // Store the definition. Check for duplicates across files. - if let Some(existing_def) = generated_type_defs - .insert(type_name_to_find.clone(), wit_definition.clone()) - { - // Clone wit_definition here - // Simple string comparison might be too strict if formatting differs slightly. - // But good enough for a warning. - if existing_def != wit_definition { - // Compare with the cloned value - warn!(type_name = %type_name_to_find, "Type definition found in multiple files with different generated content. 
Using the one from: {}", file_path.display()); - } - } - processed_types.insert(type_name_to_find.clone()); // Mark as processed - definition_found_in_project = true; - - // Add newly discovered dependencies from this type's definition to the queue - for dep in new_local_deps { - if !processed_types.contains(&dep) && !types_to_find_queue.contains(&dep) { - debug!(dependency = %dep, discovered_by = %type_name_to_find, "Adding new dependency to find queue"); - types_to_find_queue.push(dep); - } + // Calculate in-degrees + for deps in type_dependencies.values() { + for dep in deps { + if let Some(degree) = in_degree.get_mut(dep) { + *degree += 1; + } + } + } + + // Find all types with in-degree 0 + let mut queue: Vec = in_degree + .iter() + .filter(|(_, °ree)| degree == 0) + .map(|(name, _)| name.clone()) + .collect(); + + // Process queue + while let Some(type_name) = queue.pop() { + sorted_types.push(type_name.clone()); + + // Reduce in-degree of dependent types + if let Some(deps) = type_dependencies.get(&type_name) { + for dep in deps { + if let Some(degree) = in_degree.get_mut(dep) { + *degree -= 1; + if *degree == 0 { + queue.push(dep.clone()); } - // Found the definition for this type, stop searching files for it - break; } - None => continue, // Not in this file, check next file } } - // If after checking all files, the definition wasn't found - if !definition_found_in_project { - debug!(type_name=%type_name_to_find, "Definition not found in any scanned file."); - // Mark as processed to avoid infinite loop. Verification step will catch this. 
- processed_types.insert(type_name_to_find.clone()); + } + + // Check for cycles + if sorted_types.len() != needed_types.len() { + let missing: Vec = needed_types + .iter() + .filter(|t| !sorted_types.contains(t)) + .cloned() + .collect(); + warn!(missing = ?missing, "Circular dependency detected in type definitions"); + // Add remaining types anyway (WIT might still work) + for t in missing { + sorted_types.push(t); } } - debug!("Finished iterative type definition resolution"); + + debug!(sorted_count = %sorted_types.len(), "Completed topological sort"); // --- 4. Verify All Used Types Have Definitions --- - debug!(final_used_types = ?global_used_types, found_definitions = ?generated_type_defs.keys(), "Starting final verification"); + debug!(final_used_types = ?global_used_types, available_definitions = ?all_type_definitions.keys(), "Starting final verification"); let mut undefined_types = Vec::new(); for used_type_name in &global_used_types { if !is_wit_primitive_or_builtin(used_type_name) - && !generated_type_defs.contains_key(used_type_name) + && !all_type_definitions.contains_key(used_type_name) { warn!(type_name=%used_type_name, "Verification failed: Used type has no generated definition."); undefined_types.push(used_type_name.clone()); @@ -1058,11 +1128,17 @@ fn process_rust_project(project_path: &Path, api_dir: &Path) -> Result = generated_type_defs.into_values().collect(); - all_generated_defs.sort(); // Sort type definitions for consistent output - signature_structs.sort(); // Sort signature records as well + // Use topologically sorted types to ensure definitions come before uses + let mut relevant_defs: Vec = Vec::new(); + for type_name in &sorted_types { + if let Some(def) = all_type_definitions.get(type_name) { + relevant_defs.push(def.clone()); + } + } + // No need to sort again - already in topological order + signature_structs.sort(); // Sort signature records for consistency - if signature_structs.is_empty() && all_generated_defs.is_empty() { + if 
signature_structs.is_empty() && relevant_defs.is_empty() { // Use the original interface name if available, otherwise fallback let name_for_warning = interface_name.as_deref().unwrap_or(""); warn!(interface_name = %name_for_warning, "No attributed functions or used types requiring definitions found. No WIT interface file generated for this project."); @@ -1079,10 +1155,10 @@ fn process_rust_project(project_path: &Path, api_dir: &Path) -> Result Result Result<(Vec { // Only add import if an interface name was actually generated if !interface.is_empty() { - new_imports.push(format!(" import {interface};")); + let import_wit_ident = to_wit_ident(&interface); + new_imports.push(format!(" import {};", import_wit_ident)); interfaces.push(interface); // Add to list of generated interfaces } else { // Log if processing succeeded but generated no interface content diff --git a/src/new/templates/ui/chat/ui/package.json b/src/new/templates/ui/chat/ui/package.json index ec9ddd50..f1714e41 100644 --- a/src/new/templates/ui/chat/ui/package.json +++ b/src/new/templates/ui/chat/ui/package.json @@ -13,7 +13,7 @@ "preview": "vite preview" }, "dependencies": { - "@hyperware-ai/client-api": "^0.1.0", + "@hyperware-ai/client-api": "^0.1.4", "react": "^18.2.0", "react-dom": "^18.2.0", "zustand": "^4.4.7" diff --git a/src/publish/mod.rs b/src/publish/mod.rs index 216bfa6c..e09a0fa2 100644 --- a/src/publish/mod.rs +++ b/src/publish/mod.rs @@ -98,9 +98,20 @@ pub fn make_remote_link(url: &str, text: &str) -> String { #[instrument(level = "trace", skip_all)] fn calculate_metadata_hash(package_dir: &Path) -> Result { - let metadata_text = fs::read_to_string(package_dir.join("metadata.json"))?; - let hash = keccak256(metadata_text.as_bytes()); - Ok(format!("0x{}", hex::encode(hash))) + let mut metadata_text = fs::read_to_string(package_dir.join("metadata.json"))?; + if !metadata_text.ends_with('\n') { + metadata_text.push('\n'); + } + let hash = keccak_256_hash(metadata_text.as_bytes()); + 
Ok(hash) +} + +/// generate a Keccak-256 hash string (with 0x prefix) of the metadata bytes +pub fn keccak_256_hash(bytes: &[u8]) -> String { + use sha3::{Digest, Keccak256}; + let mut hasher = Keccak256::new(); + hasher.update(bytes); + format!("0x{:x}", hasher.finalize()) } #[instrument(level = "trace", skip_all)]