From f8a8a1fad8ea14201742d3690eac465dc3443aec Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 15 Nov 2024 16:32:42 -0800 Subject: [PATCH 1/9] build: add minimum-viable rewrite-before-build --- src/build/mod.rs | 14 ++++++++-- src/build/rewrite.rs | 61 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 src/build/rewrite.rs diff --git a/src/build/mod.rs b/src/build/mod.rs index 1d19e8c..6b5dc05 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -29,6 +29,9 @@ use crate::setup::{ use crate::view_api; use crate::KIT_CACHE; +mod rewrite; +use rewrite::copy_and_rewrite_package; + const PY_VENV_NAME: &str = "process_env"; const JAVASCRIPT_SRC_PATH: &str = "src/lib.js"; const PYTHON_SRC_PATH: &str = "src/lib.py"; @@ -1734,7 +1737,9 @@ pub async fn execute( check_process_lib_version(&package_dir.join("Cargo.toml"))?; - let ui_dirs = get_ui_dirs(package_dir, &include, &exclude)?; + let rewritten_dir = copy_and_rewrite_package(package_dir)?; + + let ui_dirs = get_ui_dirs(&rewritten_dir, &include, &exclude)?; if !no_ui && !ui_dirs.is_empty() { if !skip_deps_check { let mut recv_kill = make_fake_kill_chan(); @@ -1749,7 +1754,7 @@ pub async fn execute( if !ui_only { compile_package( - package_dir, + &rewritten_dir, skip_deps_check, features, url, @@ -1766,6 +1771,11 @@ pub async fn execute( .await?; } + if package_dir.join("pkg").exists() { + fs::remove_dir_all(package_dir.join("pkg"))?; + } + copy_dir(rewritten_dir.join("pkg"), package_dir.join("pkg"))?; + let metadata = read_metadata(package_dir)?; let pkg_publisher = make_pkg_publisher(&metadata); let (_zip_filename, hash_string) = zip_pkg(package_dir, &pkg_publisher)?; diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs new file mode 100644 index 0000000..4d0301d --- /dev/null +++ b/src/build/rewrite.rs @@ -0,0 +1,61 @@ +use std::path::{Path, PathBuf}; + +use color_eyre::Result; +use fs_err as fs; +use regex::Regex; +use tracing::{debug, instrument}; + +#[instrument(level = "trace", skip_all)] +pub fn copy_and_rewrite_package(package_dir: &Path) -> Result { + // Create target/rewrite/ directory + let rewrite_dir = package_dir.join("target").join("rewrite"); + if rewrite_dir.exists() { + fs::remove_dir_all(&rewrite_dir)?; + } + fs::create_dir_all(&rewrite_dir)?; + + // Copy package contents + copy_dir_and_rewrite(package_dir, &rewrite_dir)?; + + Ok(rewrite_dir) +} + +#[instrument(level = "trace", skip_all)] +fn copy_dir_and_rewrite(src: &Path, dst: &Path) -> Result<()> { + if !dst.exists() { + fs::create_dir_all(dst)?; + } + + for entry in fs::read_dir(src)? 
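+ // each entry: recurse into subdirectories (skipping target/ to avoid copying build output), rewrite .rs sources, and copy every other file verbatim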
{ + let entry = entry?; + let path = entry.path(); + let dest_path = dst.join(entry.file_name()); + + if path.is_dir() { + // Skip target/ directory to avoid recursion + if path.file_name().and_then(|n| n.to_str()) == Some("target") { + continue; + } + copy_dir_and_rewrite(&path, &dest_path)?; + } else { + if path.extension().and_then(|s| s.to_str()) == Some("rs") { + // Rewrite Rust files + let contents = fs::read_to_string(&path)?; + let new_contents = rewrite_rust_file(&contents)?; + debug!("rewrote {}", dest_path.display()); + fs::write(&dest_path, new_contents)?; + } else { + // Copy other files as-is + fs::copy(&path, &dest_path)?; + } + } + } + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +fn rewrite_rust_file(content: &str) -> Result { + let println_re = Regex::new(r#"(\s*)println!\("(.*)"(.*)\)"#)?; + let result = println_re.replace_all(content, r#"${1}println!("hi ${2}"${3})"#); + Ok(result.into_owned()) +} From 3941cb9defbb81e0e1f27aa64dc84dad897940d1 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Tue, 19 Nov 2024 17:39:53 -0800 Subject: [PATCH 2/9] build: add minimum-viable rewrite of Spawn(|| {}) --- Cargo.lock | 2 + Cargo.toml | 2 + src/build/rewrite.rs | 455 ++++++++++++++++++++++++++++++++++++++++--- 3 files changed, 435 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e677082..7bb2fcc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2175,8 +2175,10 @@ dependencies = [ "serde", "serde_json", "sha2", + "thiserror", "tokio", "toml", + "toml_edit 0.22.20", "tracing", "tracing-appender", "tracing-error", diff --git a/Cargo.toml b/Cargo.toml index cad4c00..11bbb8e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,6 +46,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10.8" +thiserror = "1.0" tokio = { version = "1.28", features = [ "macros", "process", @@ -55,6 +56,7 @@ tokio = { version = "1.28", features = [ "time", ] } toml = "0.8" +toml_edit = "0.22" tracing = "0.1" tracing-appender = "0.2" tracing-error = "0.2" diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs index 4d0301d..4ac2fa2 100644 --- a/src/build/rewrite.rs +++ b/src/build/rewrite.rs @@ -1,61 +1,468 @@ +use std::collections::HashMap; use std::path::{Path, PathBuf}; -use color_eyre::Result; +use color_eyre::{eyre::eyre, Result}; use fs_err as fs; use regex::Regex; +use toml_edit; use tracing::{debug, instrument}; +#[derive(Debug, Default)] +struct GeneratedProcesses { + // original process name -> (generated process name -> (wasm path, content)) + processes: HashMap>, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +struct GeneratedProcessesExternal { + // original process name -> (generated process name -> wasm path) + processes: HashMap>, +} + +impl From for GeneratedProcessesExternal { + fn from(input: GeneratedProcesses) -> Self { + let processes = input + .processes + .iter() + .map(|(parent_name, child_to_content)| { + ( + parent_name.to_string(), + child_to_content + .iter() + .map(|(child_name, (path, _content))| { + (child_name.to_string(), path.to_string()) + }) + .collect(), + ) + }) + .collect(); + GeneratedProcessesExternal { processes } + } +} + +#[derive(Debug)] +struct SpawnMatch { + args: String, + body: String, + imports: Vec, + start_pos: usize, + end_pos: usize, +} + +#[derive(Debug)] +struct SpawnInfo { + args: String, // The arguments passed to the spawn closure + body: String, // The body of the spawn closure + imports: Vec, // All imports from the original file + wit_bindgen: String, // 
`wit_bindgen!()` call +} + +#[derive(Debug, thiserror::Error)] +enum SpawnParseError { + #[error("Parse failed due to malformed imports")] + Imports, + #[error("Spawn parse failed due to malformed closure: no closing pipe in closure")] + NoClosingPipe, + #[error("Spawn parse failed due to malformed closure: no opening brace")] + NoOpeningBrace, + #[error("Spawn parse failed due to malformed closure: unclosed brace")] + UnclosedBrace, + #[error("Spawn parse failed due to malformed closure: unclosed paren")] + UnclosedParen, +} + +fn extract_imports(content: &str) -> Result, SpawnParseError> { + let imports_re = Regex::new(r"use\s+([^;]+);").map_err(|_| SpawnParseError::Imports)?; + Ok(imports_re + .captures_iter(content) + .map(|cap| cap[1].trim().to_string()) + .collect()) +} + +fn extract_wit_bindgen(content: &str) -> Option { + // Look for wit_bindgen::generate! macro + if let Some(start) = content.find("wit_bindgen::generate!") { + let mut brace_count = 0; + let mut in_macro = false; + let mut saw_closing_brace = false; + let mut saw_closing_paren = false; + let mut macro_end = start; + + // Find the closing part of the macro by counting braces + for (i, c) in content[start..].chars().enumerate() { + match c { + '{' => { + brace_count += 1; + in_macro = true; + } + '}' => { + brace_count -= 1; + if in_macro && brace_count == 0 { + saw_closing_brace = true; + } + } + ')' => { + if in_macro && saw_closing_brace && brace_count == 0 { + saw_closing_paren = true; + } + } + ';' => { + if in_macro && saw_closing_brace && saw_closing_paren && brace_count == 0 { + macro_end = start + i + 1; + break; + } + } + _ => {} + } + } + + Some(content[start..macro_end].to_string()) + } else { + None + } +} + +fn parse_spawn_from(input: &str) -> Result<(String, String, usize), SpawnParseError> { + // Skip the "Spawn(|" prefix since we know it's there + let input_after_spawn = &input["Spawn(|".len()..]; + + // Find the closing "|" + let pipe_end = input_after_spawn + .find('|') + .ok_or(SpawnParseError::NoClosingPipe)?; + + // Find the opening "{" + let brace_start = input_after_spawn[pipe_end..] + .find('{') + .ok_or(SpawnParseError::NoOpeningBrace)? 
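+ // find() searches input_after_spawn[pipe_end..], so its offset is slice-relative; adding pipe_end converts it to an index into input_after_spawn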
+ .saturating_add(pipe_end); + + // Find the closing "}" while handling nested braces + let mut brace_count = 1; + let mut brace_end = None; + let mut paren_end = None; + + for (i, c) in input_after_spawn[brace_start + 1..].chars().enumerate() { + match c { + '{' => brace_count += 1, + '}' => { + brace_count -= 1; + if brace_count == 0 { + brace_end = Some(brace_start + 1 + i); + } + } + ')' => { + if brace_count == 0 && brace_end.is_some() { + paren_end = Some(brace_start + 1 + i); + break; + } + } + _ => {} + } + } + + let brace_end = brace_end.ok_or(SpawnParseError::UnclosedBrace)?; + let paren_end = paren_end.ok_or(SpawnParseError::UnclosedParen)?; + + let args = input_after_spawn[..pipe_end].trim().to_string(); + let body = input_after_spawn[brace_start + 1..brace_end] + .trim() + .to_string(); + + // Return the total length consumed so we know where to continue searching + let total_consumed = "Spawn(|".len() + paren_end + 1; + + Ok((args, body, total_consumed)) +} + +fn find_all_spawns(input: &str) -> Result, SpawnParseError> { + let mut results = Vec::new(); + let mut search_from = 0; + let imports = extract_imports(input)?; + + while let Some(spawn_start) = input[search_from..].find("Spawn(|") { + let absolute_start = search_from + spawn_start; + + let (args, body, consumed_len) = parse_spawn_from(&input[absolute_start..])?; + + results.push(SpawnMatch { + args, + body, + imports: imports.clone(), + start_pos: absolute_start, + end_pos: absolute_start + consumed_len, + }); + + search_from = absolute_start + consumed_len; + } + + Ok(results) +} + +#[instrument(level = "trace", skip_all)] +fn generate_worker_process(process_name: &str, spawn_info: &SpawnInfo) -> Result { + let template = format!( + r#"// Generated worker process for {process_name} +{} + +{} + +call_init!(init); +fn init(our: Address) {{ + // Get args from parent + let message = await_message().expect("Failed to get args from parent"); + let args: serde_json::Value = serde_json::from_slice(&message.body()).unwrap(); + + // Execute original spawn body + {} + + // Exit after completion + std::process::exit(0); +}} +"#, + // Add all the original imports + spawn_info + .imports + .iter() + .map(|i| format!("use {i};\n")) + .collect::(), + spawn_info.wit_bindgen, + spawn_info.body + ); + + Ok(template) +} + #[instrument(level = "trace", skip_all)] pub fn copy_and_rewrite_package(package_dir: &Path) -> Result { - // Create target/rewrite/ directory + debug!("Rewriting for {}...", package_dir.display()); let rewrite_dir = package_dir.join("target").join("rewrite"); if rewrite_dir.exists() { fs::remove_dir_all(&rewrite_dir)?; } fs::create_dir_all(&rewrite_dir)?; - // Copy package contents - copy_dir_and_rewrite(package_dir, &rewrite_dir)?; + copy_dir(package_dir, &rewrite_dir)?; + + let mut generated = GeneratedProcesses::default(); + + // Process all Rust files in the copied directory + process_package(&rewrite_dir, &mut generated)?; + + // Create child processes + create_child_processes(&rewrite_dir, &generated)?; + + // Update workspace Cargo.toml + update_workspace_cargo_toml(&rewrite_dir, &generated)?; Ok(rewrite_dir) } +// TODO: factor out with build::mod.rs::copy_dir() #[instrument(level = "trace", skip_all)] -fn copy_dir_and_rewrite(src: &Path, dst: &Path) -> Result<()> { +fn copy_dir(src: impl AsRef, dst: impl AsRef) -> Result<()> { + let src = src.as_ref(); + let dst = dst.as_ref(); if !dst.exists() { fs::create_dir_all(dst)?; } for entry in fs::read_dir(src)? 
{ let entry = entry?; - let path = entry.path(); - let dest_path = dst.join(entry.file_name()); + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); - if path.is_dir() { - // Skip target/ directory to avoid recursion - if path.file_name().and_then(|n| n.to_str()) == Some("target") { + if src_path.is_dir() { + if src_path.file_name().and_then(|n| n.to_str()) == Some("target") { continue; } - copy_dir_and_rewrite(&path, &dest_path)?; + copy_dir(&src_path, &dst_path)?; } else { - if path.extension().and_then(|s| s.to_str()) == Some("rs") { - // Rewrite Rust files - let contents = fs::read_to_string(&path)?; - let new_contents = rewrite_rust_file(&contents)?; - debug!("rewrote {}", dest_path.display()); - fs::write(&dest_path, new_contents)?; - } else { - // Copy other files as-is - fs::copy(&path, &dest_path)?; + fs::copy(&src_path, &dst_path)?; + } + } + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +fn create_child_processes(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { + for (process_name, workers) in &generated.processes { + for (worker_name, (_, content)) in workers { + let parent_dir = package_dir.join(process_name); + let worker_dir = package_dir.join(worker_name); + + // Copy the source directory structure from parent + let parent_src = parent_dir.join("src"); + let worker_src = worker_dir.join("src"); + debug!("{} {}", parent_src.display(), worker_src.display()); + copy_dir(&parent_src, &worker_src)?; + + // Overwrite lib.rs with our generated content + fs::write(worker_src.join("lib.rs"), content)?; + + // Copy and modify Cargo.toml + let parent_cargo = fs::read_to_string(parent_dir.join("Cargo.toml"))?; + let mut doc = parent_cargo.parse::()?; + + // Update package name to worker name + if let Some(package) = doc.get_mut("package") { + if let Some(name) = package.get_mut("name") { + *name = toml_edit::value(worker_name.as_str()); + } } + + fs::write(worker_dir.join("Cargo.toml"), doc.to_string())?; + } + } + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +fn update_workspace_cargo_toml(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { + let cargo_toml_path = package_dir.join("Cargo.toml"); + let cargo_toml = fs::read_to_string(&cargo_toml_path)?; + + // Parse existing TOML + let mut doc = cargo_toml.parse::()?; + + // Get or create workspace section + let workspace = doc.entry("workspace").or_insert(toml_edit::table()); + + // Get or create members array + let members = workspace + .as_table_mut() + .ok_or_else(|| eyre!("workspace is not a table"))? 
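+ // [workspace].members is created if missing; the generated worker crate names are appended below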
+ .entry("members") + .or_insert(toml_edit::array()); + + let members_array = members + .as_array_mut() + .ok_or_else(|| eyre!("members is not an array"))?; + + // Add all worker packages + for workers in generated.processes.values() { + for worker_name in workers.keys() { + members_array.push(worker_name); } } + + // Write back to file + fs::write(cargo_toml_path, doc.to_string())?; + Ok(()) } #[instrument(level = "trace", skip_all)] -fn rewrite_rust_file(content: &str) -> Result { - let println_re = Regex::new(r#"(\s*)println!\("(.*)"(.*)\)"#)?; - let result = println_re.replace_all(content, r#"${1}println!("hi ${2}"${3})"#); - Ok(result.into_owned()) +fn rewrite_rust_file( + process_name: &str, + file_name: &str, + content: &str, + generated: &mut GeneratedProcesses, +) -> Result { + let spawn_matches = find_all_spawns(content)?; + let mut new_content = content.to_string(); + + // Process spawns in reverse order to not invalidate positions + for (i, spawn_match) in spawn_matches.iter().enumerate().rev() { + let worker_name = format!("{process_name}-worker-{i}"); + let wasm_name = format!("{worker_name}.wasm"); + + // Generate worker process + let wit_bindgen = extract_wit_bindgen(content).unwrap_or_else(|| { + // Fallback to default if not found + r#"wit_bindgen::generate!({ + path: "target/wit", + world: "process-v0", +})"# + .to_string() + }); + + let worker_code = generate_worker_process( + file_name, + &SpawnInfo { + args: spawn_match.args.clone(), + body: spawn_match.body.clone(), + imports: spawn_match.imports.clone(), + wit_bindgen, + }, + )?; + + // Track in generated processes + generated + .processes + .entry(process_name.to_string()) + .or_default() + .insert(worker_name.clone(), (wasm_name, worker_code)); + + // Create replacement spawn code + let args = spawn_match + .args + .split(", ") + .map(|s| format!("\"{s}\":{s}")) + .collect::>() + .join(","); + let args = "{".to_string() + &args; + let args = args + "}"; + let replacement = format!( + r#"{{ + use kinode_process_lib::{{spawn, OnExit, Request}}; + let worker = spawn( + None, + &format!("{{}}:{{}}/pkg/{}.wasm", our.process.package_name, our.process.publisher_node), + OnExit::None, + vec![], + vec![], + false, + ).expect("failed to spawn worker"); + Request::to((our.node(), worker)) + .body(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .send() + .expect("failed to initialize worker"); + }}"#, + worker_name, args, + ); + + // Replace in the content using positions + new_content.replace_range(spawn_match.start_pos..spawn_match.end_pos, &replacement); + } + + Ok(new_content) +} + +#[instrument(level = "trace", skip_all)] +fn process_package(package_dir: &Path, generated: &mut GeneratedProcesses) -> Result<()> { + if !package_dir.is_dir() { + return Ok(()); + } + + for entry in fs::read_dir(package_dir)? { + let entry = entry?; + let path = entry.path(); + + if path.is_dir() { + if path.file_name().and_then(|n| n.to_str()) == Some("target") { + continue; + } + process_package(&path, generated)?; + } else if path.extension().and_then(|s| s.to_str()) == Some("rs") { + let process_name = path + .parent() + .and_then(|p| p.parent()) + .and_then(|n| n.file_name()) + .and_then(|n| n.to_str()) + .ok_or_else(|| eyre!("Invalid process name"))? + .to_string(); + + let file_name = path + .file_stem() + .and_then(|n| n.to_str()) + .ok_or_else(|| eyre!("Invalid file name"))? 
+ .to_string(); + + let content = fs::read_to_string(&path)?; + let new_content = rewrite_rust_file(&process_name, &file_name, &content, generated)?; + fs::write(&path, new_content)?; + } + } + Ok(()) } From b548cb69a9600aef33e32d388e13c80174d611df Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 20 Nov 2024 11:00:11 -0800 Subject: [PATCH 3/9] build: rewrite `Spawn!()` rather than `Spawn()` for happier linter --- src/build/rewrite.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs index 4ac2fa2..f655413 100644 --- a/src/build/rewrite.rs +++ b/src/build/rewrite.rs @@ -123,8 +123,8 @@ fn extract_wit_bindgen(content: &str) -> Option { } fn parse_spawn_from(input: &str) -> Result<(String, String, usize), SpawnParseError> { - // Skip the "Spawn(|" prefix since we know it's there - let input_after_spawn = &input["Spawn(|".len()..]; + // Skip the "Spawn!(|" prefix since we know it's there + let input_after_spawn = &input["Spawn!(|".len()..]; // Find the closing "|" let pipe_end = input_after_spawn @@ -170,7 +170,7 @@ fn parse_spawn_from(input: &str) -> Result<(String, String, usize), SpawnParseEr .to_string(); // Return the total length consumed so we know where to continue searching - let total_consumed = "Spawn(|".len() + paren_end + 1; + let total_consumed = "Spawn!(|".len() + paren_end + 1; Ok((args, body, total_consumed)) } @@ -180,7 +180,7 @@ fn find_all_spawns(input: &str) -> Result, SpawnParseError> { let mut search_from = 0; let imports = extract_imports(input)?; - while let Some(spawn_start) = input[search_from..].find("Spawn(|") { + while let Some(spawn_start) = input[search_from..].find("Spawn!(|") { let absolute_start = search_from + spawn_start; let (args, body, consumed_len) = parse_spawn_from(&input[absolute_start..])?; From 26ab5fa45cc6446a16246888c6103c313fd023b2 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 20 Nov 2024 11:39:46 -0800 Subject: [PATCH 4/9] build: make rewriting optional, but default, with `--no-rewrite` flag --- src/build/mod.rs | 28 ++++++++++++++++++++++------ src/build_start_package/mod.rs | 2 ++ src/main.rs | 16 ++++++++++++++++ src/run_tests/mod.rs | 3 +++ 4 files changed, 43 insertions(+), 6 deletions(-) diff --git a/src/build/mod.rs b/src/build/mod.rs index 6b5dc05..5330cc5 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -1158,6 +1158,7 @@ async fn fetch_dependencies( default_world: Option<&str>, include: &HashSet, exclude: &HashSet, + no_rewrite: bool, force: bool, verbose: bool, ) -> Result<()> { @@ -1174,6 +1175,7 @@ async fn fetch_dependencies( default_world, vec![], // TODO: what about deps-of-deps? 
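+ // positional args here: local_dependencies (above), add_paths_to_api (below), then the new no_rewrite flag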
vec![], + no_rewrite, false, force, verbose, @@ -1210,6 +1212,7 @@ async fn fetch_dependencies( default_world, local_dep_deps, vec![], + no_rewrite, false, force, verbose, @@ -1525,6 +1528,7 @@ async fn compile_package( add_paths_to_api: &Vec, include: &HashSet, exclude: &HashSet, + no_rewrite: bool, force: bool, verbose: bool, ignore_deps: bool, // for internal use; may cause problems when adding recursive deps @@ -1547,6 +1551,7 @@ async fn compile_package( default_world, include, exclude, + no_rewrite, force, verbose, ) @@ -1654,6 +1659,7 @@ pub async fn execute( default_world: Option<&str>, local_dependencies: Vec, add_paths_to_api: Vec, + no_rewrite: bool, reproducible: bool, force: bool, verbose: bool, @@ -1737,9 +1743,16 @@ pub async fn execute( check_process_lib_version(&package_dir.join("Cargo.toml"))?; - let rewritten_dir = copy_and_rewrite_package(package_dir)?; + // live_dir is the "dir that is being built" or is "live"; + // if `no_rewrite`, that is just `package_dir`; + // else, it is the modified copy that is in `target/rewrite/` + let live_dir = if no_rewrite { + PathBuf::from(package_dir) + } else { + copy_and_rewrite_package(package_dir)? + }; - let ui_dirs = get_ui_dirs(&rewritten_dir, &include, &exclude)?; + let ui_dirs = get_ui_dirs(&live_dir, &include, &exclude)?; if !no_ui && !ui_dirs.is_empty() { if !skip_deps_check { let mut recv_kill = make_fake_kill_chan(); @@ -1754,7 +1767,7 @@ pub async fn execute( if !ui_only { compile_package( - &rewritten_dir, + &live_dir, skip_deps_check, features, url, @@ -1764,6 +1777,7 @@ pub async fn execute( &add_paths_to_api, &include, &exclude, + no_rewrite, force, verbose, ignore_deps, @@ -1771,10 +1785,12 @@ pub async fn execute( .await?; } - if package_dir.join("pkg").exists() { - fs::remove_dir_all(package_dir.join("pkg"))?; + if !no_rewrite { + if package_dir.join("pkg").exists() { + fs::remove_dir_all(package_dir.join("pkg"))?; + } + copy_dir(live_dir.join("pkg"), package_dir.join("pkg"))?; } - copy_dir(rewritten_dir.join("pkg"), package_dir.join("pkg"))?; let metadata = read_metadata(package_dir)?; let pkg_publisher = make_pkg_publisher(&metadata); diff --git a/src/build_start_package/mod.rs b/src/build_start_package/mod.rs index 2a29546..36eec64 100644 --- a/src/build_start_package/mod.rs +++ b/src/build_start_package/mod.rs @@ -21,6 +21,7 @@ pub async fn execute( default_world: Option<&str>, local_dependencies: Vec, add_paths_to_api: Vec, + no_rewrite: bool, reproducible: bool, force: bool, verbose: bool, @@ -38,6 +39,7 @@ pub async fn execute( default_world, local_dependencies, add_paths_to_api, + no_rewrite, reproducible, force, verbose, diff --git a/src/main.rs b/src/main.rs index f111bfc..c5d1892 100644 --- a/src/main.rs +++ b/src/main.rs @@ -222,6 +222,7 @@ async fn execute( .unwrap_or_default() .map(|s| PathBuf::from(s)) .collect(); + let no_rewrite = matches.get_one::("NO_REWRITE").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -239,6 +240,7 @@ async fn execute( default_world.map(|w| w.as_str()), local_dependencies, add_paths_to_api, + *no_rewrite, *reproducible, *force, *verbose, @@ -283,6 +285,7 @@ async fn execute( .unwrap_or_default() .map(|s| PathBuf::from(s)) .collect(); + let no_rewrite = matches.get_one::("NO_REWRITE").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = 
matches.get_one::("VERBOSE").unwrap(); @@ -300,6 +303,7 @@ async fn execute( default_world.map(|w| w.as_str()), local_dependencies, add_paths_to_api, + *no_rewrite, *reproducible, *force, *verbose, @@ -733,6 +737,12 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .long("add-to-api") .help("Path to file to add to api.zip (can specify multiple times)") ) + .arg(Arg::new("NO_REWRITE") + .action(ArgAction::SetTrue) + .long("no-rewrite") + .help("Don't rewrite the package (disables `Spawn!()`) [default: rewrite]") + .required(false) + ) .arg(Arg::new("REPRODUCIBLE") .action(ArgAction::SetTrue) .short('r') @@ -834,6 +844,12 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .help("Pass these comma-delimited feature flags to Rust cargo builds") .required(false) ) + .arg(Arg::new("NO_REWRITE") + .action(ArgAction::SetTrue) + .long("no-rewrite") + .help("Don't rewrite the package (disables `Spawn!()`) [default: rewrite]") + .required(false) + ) .arg(Arg::new("REPRODUCIBLE") .action(ArgAction::SetTrue) .short('r') diff --git a/src/run_tests/mod.rs b/src/run_tests/mod.rs index 2207903..94b9a3b 100644 --- a/src/run_tests/mod.rs +++ b/src/run_tests/mod.rs @@ -382,6 +382,7 @@ async fn build_packages( false, false, false, + false, ) .await?; debug!("Start {path:?}"); @@ -406,6 +407,7 @@ async fn build_packages( false, false, false, + false, ) .await?; } @@ -427,6 +429,7 @@ async fn build_packages( false, false, false, + false, ) .await?; } From 5434390bea4cf63b421710bdc5140af46b3154ca Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 20 Nov 2024 17:29:57 -0800 Subject: [PATCH 5/9] build: improve arg passing with struct; destructure into child for max devex --- Cargo.lock | 1 + Cargo.toml | 1 + src/build/rewrite.rs | 169 ++++++++++++++++++++++++++++++------------- src/new/mod.rs | 2 +- 4 files changed, 121 insertions(+), 52 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7bb2fcc..055174b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2175,6 +2175,7 @@ dependencies = [ "serde", "serde_json", "sha2", + "syn 2.0.75", "thiserror", "tokio", "toml", diff --git a/Cargo.toml b/Cargo.toml index 11bbb8e..48a62ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,6 +46,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10.8" +syn = { version = "2.0", features = ["full"] } thiserror = "1.0" tokio = { version = "1.28", features = [ "macros", diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs index f655413..2fa4f44 100644 --- a/src/build/rewrite.rs +++ b/src/build/rewrite.rs @@ -4,9 +4,12 @@ use std::path::{Path, PathBuf}; use color_eyre::{eyre::eyre, Result}; use fs_err as fs; use regex::Regex; +use syn::{__private::ToTokens, parse_str}; use toml_edit; use tracing::{debug, instrument}; +use crate::new::snake_to_upper_camel_case; + #[derive(Debug, Default)] struct GeneratedProcesses { // original process name -> (generated process name -> (wasm path, content)) @@ -40,6 +43,12 @@ impl From for GeneratedProcessesExternal { } } +#[derive(Debug)] +struct ArgInfo { + name: String, + ty: String, +} + #[derive(Debug)] struct SpawnMatch { args: String, @@ -49,14 +58,6 @@ struct SpawnMatch { end_pos: usize, } -#[derive(Debug)] -struct SpawnInfo { - args: String, // The arguments passed to the spawn closure - body: String, // The body of the spawn closure - imports: Vec, // All imports from the original file - wit_bindgen: String, // `wit_bindgen!()` call -} - #[derive(Debug, thiserror::Error)] enum SpawnParseError { #[error("Parse 
failed due to malformed imports")] @@ -71,6 +72,77 @@ enum SpawnParseError { UnclosedParen, } +#[instrument(level = "trace", skip_all)] +fn parse_fn_args(args: &str) -> Result> { + // Parse the argument string as Rust function parameters + let fn_item: syn::ItemFn = parse_str(&format!("fn dummy({args}) {{}}"))?; + + // Extract the parameters from the function signature + let params = fn_item + .sig + .inputs + .into_iter() + .filter_map(|param| { + if let syn::FnArg::Typed(pat_type) = param { + Some(ArgInfo { + name: pat_type.pat.into_token_stream().to_string(), + ty: pat_type.ty.into_token_stream().to_string(), + }) + } else { + None + } + }) + .collect(); + + Ok(params) +} + +fn make_args_struct_name(worker_name: &str) -> String { + format!( + "{}Args", + snake_to_upper_camel_case(&worker_name.replace("-", "_")) + ) +} + +fn generate_args_struct_type(struct_name: &str, args: &[ArgInfo]) -> String { + let fields = args + .iter() + .map(|arg| format!(" {}: {},", arg.name, arg.ty)) + .collect::>() + .join("\n"); + + format!( + r#"#[derive(serde::Serialize, serde::Deserialize)] +struct {struct_name} {{ +{fields} +}}"# + ) +} + +fn generate_args_struct_instance(struct_name: &str, args: &[ArgInfo]) -> String { + let fields = args + .iter() + .map(|arg| format!(" {0}: {0}.clone(),", arg.name)) + .collect::>() + .join("\n"); + + format!( + r#"let args = {struct_name} {{ +{fields} + }};"# + ) +} + +fn generate_args_struct_destructure(struct_name: &str, args: &[ArgInfo]) -> String { + let fields = args + .iter() + .map(|arg| arg.name.clone()) + .collect::>() + .join(", "); + + format!(r#"let {struct_name} {{ {fields} }}"#) +} + fn extract_imports(content: &str) -> Result, SpawnParseError> { let imports_re = Regex::new(r"use\s+([^;]+);").map_err(|_| SpawnParseError::Imports)?; Ok(imports_re @@ -200,34 +272,36 @@ fn find_all_spawns(input: &str) -> Result, SpawnParseError> { } #[instrument(level = "trace", skip_all)] -fn generate_worker_process(process_name: &str, spawn_info: &SpawnInfo) -> Result { +fn generate_worker_process( + process_name: &str, + body: &str, + imports: &[String], + wit_bindgen: &str, + args_type: &str, + args_destructure: &str, +) -> Result { + let imports = imports + .iter() + .map(|i| format!("#[allow(unused_imports)]\nuse {i};\n")) + .collect::(); let template = format!( r#"// Generated worker process for {process_name} -{} +{imports} + +{wit_bindgen} -{} +{args_type} call_init!(init); fn init(our: Address) {{ // Get args from parent let message = await_message().expect("Failed to get args from parent"); - let args: serde_json::Value = serde_json::from_slice(&message.body()).unwrap(); + {args_destructure} = serde_json::from_slice(&message.body()).unwrap(); - // Execute original spawn body - {} - - // Exit after completion - std::process::exit(0); + // Execute `Spawn!()` function body + {body} }} "#, - // Add all the original imports - spawn_info - .imports - .iter() - .map(|i| format!("use {i};\n")) - .collect::(), - spawn_info.wit_bindgen, - spawn_info.body ); Ok(template) @@ -355,7 +429,6 @@ fn update_workspace_cargo_toml(package_dir: &Path, generated: &GeneratedProcesse #[instrument(level = "trace", skip_all)] fn rewrite_rust_file( process_name: &str, - file_name: &str, content: &str, generated: &mut GeneratedProcesses, ) -> Result { @@ -367,6 +440,12 @@ fn rewrite_rust_file( let worker_name = format!("{process_name}-worker-{i}"); let wasm_name = format!("{worker_name}.wasm"); + let args_name = make_args_struct_name(&worker_name); + let parsed_args = 
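+ // e.g. for a hypothetical process named `chat`, the first `Spawn!(|count: u64| { ... })`
+ // becomes worker crate `chat-worker-0` (built to `chat-worker-0.wasm`) with args struct
+ // `ChatWorker0Args { count: u64 }`, which the parent serializes and sends to the worker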
parse_fn_args(&spawn_match.args)?; + let args_type = generate_args_struct_type(&args_name, &parsed_args); + let args_instance = generate_args_struct_instance(&args_name, &parsed_args); + let args_destructure = generate_args_struct_destructure(&args_name, &parsed_args); + // Generate worker process let wit_bindgen = extract_wit_bindgen(content).unwrap_or_else(|| { // Fallback to default if not found @@ -378,13 +457,12 @@ fn rewrite_rust_file( }); let worker_code = generate_worker_process( - file_name, - &SpawnInfo { - args: spawn_match.args.clone(), - body: spawn_match.body.clone(), - imports: spawn_match.imports.clone(), - wit_bindgen, - }, + process_name, + &spawn_match.body, + &spawn_match.imports, + &wit_bindgen, + &args_type, + &args_destructure, )?; // Track in generated processes @@ -395,31 +473,26 @@ fn rewrite_rust_file( .insert(worker_name.clone(), (wasm_name, worker_code)); // Create replacement spawn code - let args = spawn_match - .args - .split(", ") - .map(|s| format!("\"{s}\":{s}")) - .collect::>() - .join(","); - let args = "{".to_string() + &args; - let args = args + "}"; let replacement = format!( r#"{{ use kinode_process_lib::{{spawn, OnExit, Request}}; + {args_type} + + {args_instance} + let worker = spawn( None, - &format!("{{}}:{{}}/pkg/{}.wasm", our.process.package_name, our.process.publisher_node), + &format!("{{}}:{{}}/pkg/{worker_name}.wasm", our.process.package_name, our.process.publisher_node), OnExit::None, vec![], vec![], false, ).expect("failed to spawn worker"); Request::to((our.node(), worker)) - .body(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .body(serde_json::to_vec(&args).unwrap()) .send() .expect("failed to initialize worker"); }}"#, - worker_name, args, ); // Replace in the content using positions @@ -453,14 +526,8 @@ fn process_package(package_dir: &Path, generated: &mut GeneratedProcesses) -> Re .ok_or_else(|| eyre!("Invalid process name"))? .to_string(); - let file_name = path - .file_stem() - .and_then(|n| n.to_str()) - .ok_or_else(|| eyre!("Invalid file name"))? - .to_string(); - let content = fs::read_to_string(&path)?; - let new_content = rewrite_rust_file(&process_name, &file_name, &content, generated)?; + let new_content = rewrite_rust_file(&process_name, &content, generated)?; fs::write(&path, new_content)?; } } diff --git a/src/new/mod.rs b/src/new/mod.rs index 681cbf4..60226f4 100644 --- a/src/new/mod.rs +++ b/src/new/mod.rs @@ -73,7 +73,7 @@ impl From<&String> for Template { } } -fn snake_to_upper_camel_case(input: &str) -> String { +pub fn snake_to_upper_camel_case(input: &str) -> String { let parts: Vec<&str> = input.split('_').collect(); let mut camel_case = String::new(); From 69bc1e834c383c8fe5420eae69351cce5ad1fe49 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 20 Nov 2024 17:30:09 -0800 Subject: [PATCH 6/9] build: restructure rewrite (pure refactor) --- src/build/rewrite.rs | 284 +++++++++++++++++++++---------------------- 1 file changed, 142 insertions(+), 142 deletions(-) diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs index 2fa4f44..34b36df 100644 --- a/src/build/rewrite.rs +++ b/src/build/rewrite.rs @@ -72,6 +72,39 @@ enum SpawnParseError { UnclosedParen, } +// TODO: factor out with build::mod.rs::copy_dir() +#[instrument(level = "trace", skip_all)] +fn copy_dir(src: impl AsRef, dst: impl AsRef) -> Result<()> { + let src = src.as_ref(); + let dst = dst.as_ref(); + if !dst.exists() { + fs::create_dir_all(dst)?; + } + + for entry in fs::read_dir(src)? 
{ + let entry = entry?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + + if src_path.is_dir() { + if src_path.file_name().and_then(|n| n.to_str()) == Some("target") { + continue; + } + copy_dir(&src_path, &dst_path)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + Ok(()) +} + +fn make_args_struct_name(worker_name: &str) -> String { + format!( + "{}Args", + snake_to_upper_camel_case(&worker_name.replace("-", "_")) + ) +} + #[instrument(level = "trace", skip_all)] fn parse_fn_args(args: &str) -> Result> { // Parse the argument string as Rust function parameters @@ -97,13 +130,6 @@ fn parse_fn_args(args: &str) -> Result> { Ok(params) } -fn make_args_struct_name(worker_name: &str) -> String { - format!( - "{}Args", - snake_to_upper_camel_case(&worker_name.replace("-", "_")) - ) -} - fn generate_args_struct_type(struct_name: &str, args: &[ArgInfo]) -> String { let fields = args .iter() @@ -247,30 +273,6 @@ fn parse_spawn_from(input: &str) -> Result<(String, String, usize), SpawnParseEr Ok((args, body, total_consumed)) } -fn find_all_spawns(input: &str) -> Result, SpawnParseError> { - let mut results = Vec::new(); - let mut search_from = 0; - let imports = extract_imports(input)?; - - while let Some(spawn_start) = input[search_from..].find("Spawn!(|") { - let absolute_start = search_from + spawn_start; - - let (args, body, consumed_len) = parse_spawn_from(&input[absolute_start..])?; - - results.push(SpawnMatch { - args, - body, - imports: imports.clone(), - start_pos: absolute_start, - end_pos: absolute_start + consumed_len, - }); - - search_from = absolute_start + consumed_len; - } - - Ok(results) -} - #[instrument(level = "trace", skip_all)] fn generate_worker_process( process_name: &str, @@ -307,123 +309,28 @@ fn init(our: Address) {{ Ok(template) } -#[instrument(level = "trace", skip_all)] -pub fn copy_and_rewrite_package(package_dir: &Path) -> Result { - debug!("Rewriting for {}...", package_dir.display()); - let rewrite_dir = package_dir.join("target").join("rewrite"); - if rewrite_dir.exists() { - fs::remove_dir_all(&rewrite_dir)?; - } - fs::create_dir_all(&rewrite_dir)?; - - copy_dir(package_dir, &rewrite_dir)?; - - let mut generated = GeneratedProcesses::default(); - - // Process all Rust files in the copied directory - process_package(&rewrite_dir, &mut generated)?; - - // Create child processes - create_child_processes(&rewrite_dir, &generated)?; - - // Update workspace Cargo.toml - update_workspace_cargo_toml(&rewrite_dir, &generated)?; - - Ok(rewrite_dir) -} - -// TODO: factor out with build::mod.rs::copy_dir() -#[instrument(level = "trace", skip_all)] -fn copy_dir(src: impl AsRef, dst: impl AsRef) -> Result<()> { - let src = src.as_ref(); - let dst = dst.as_ref(); - if !dst.exists() { - fs::create_dir_all(dst)?; - } - - for entry in fs::read_dir(src)? 
{ - let entry = entry?; - let src_path = entry.path(); - let dst_path = dst.join(entry.file_name()); - - if src_path.is_dir() { - if src_path.file_name().and_then(|n| n.to_str()) == Some("target") { - continue; - } - copy_dir(&src_path, &dst_path)?; - } else { - fs::copy(&src_path, &dst_path)?; - } - } - Ok(()) -} - -#[instrument(level = "trace", skip_all)] -fn create_child_processes(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { - for (process_name, workers) in &generated.processes { - for (worker_name, (_, content)) in workers { - let parent_dir = package_dir.join(process_name); - let worker_dir = package_dir.join(worker_name); - - // Copy the source directory structure from parent - let parent_src = parent_dir.join("src"); - let worker_src = worker_dir.join("src"); - debug!("{} {}", parent_src.display(), worker_src.display()); - copy_dir(&parent_src, &worker_src)?; - - // Overwrite lib.rs with our generated content - fs::write(worker_src.join("lib.rs"), content)?; - - // Copy and modify Cargo.toml - let parent_cargo = fs::read_to_string(parent_dir.join("Cargo.toml"))?; - let mut doc = parent_cargo.parse::()?; - - // Update package name to worker name - if let Some(package) = doc.get_mut("package") { - if let Some(name) = package.get_mut("name") { - *name = toml_edit::value(worker_name.as_str()); - } - } - - fs::write(worker_dir.join("Cargo.toml"), doc.to_string())?; - } - } - Ok(()) -} - -#[instrument(level = "trace", skip_all)] -fn update_workspace_cargo_toml(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { - let cargo_toml_path = package_dir.join("Cargo.toml"); - let cargo_toml = fs::read_to_string(&cargo_toml_path)?; - - // Parse existing TOML - let mut doc = cargo_toml.parse::()?; +fn find_all_spawns(input: &str) -> Result, SpawnParseError> { + let mut results = Vec::new(); + let mut search_from = 0; + let imports = extract_imports(input)?; - // Get or create workspace section - let workspace = doc.entry("workspace").or_insert(toml_edit::table()); + while let Some(spawn_start) = input[search_from..].find("Spawn!(|") { + let absolute_start = search_from + spawn_start; - // Get or create members array - let members = workspace - .as_table_mut() - .ok_or_else(|| eyre!("workspace is not a table"))? 
- .entry("members") - .or_insert(toml_edit::array()); + let (args, body, consumed_len) = parse_spawn_from(&input[absolute_start..])?; - let members_array = members - .as_array_mut() - .ok_or_else(|| eyre!("members is not an array"))?; + results.push(SpawnMatch { + args, + body, + imports: imports.clone(), + start_pos: absolute_start, + end_pos: absolute_start + consumed_len, + }); - // Add all worker packages - for workers in generated.processes.values() { - for worker_name in workers.keys() { - members_array.push(worker_name); - } + search_from = absolute_start + consumed_len; } - // Write back to file - fs::write(cargo_toml_path, doc.to_string())?; - - Ok(()) + Ok(results) } #[instrument(level = "trace", skip_all)] @@ -533,3 +440,96 @@ fn process_package(package_dir: &Path, generated: &mut GeneratedProcesses) -> Re } Ok(()) } + +#[instrument(level = "trace", skip_all)] +fn create_child_processes(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { + for (process_name, workers) in &generated.processes { + for (worker_name, (_, content)) in workers { + let parent_dir = package_dir.join(process_name); + let worker_dir = package_dir.join(worker_name); + + // Copy the source directory structure from parent + let parent_src = parent_dir.join("src"); + let worker_src = worker_dir.join("src"); + debug!("{} {}", parent_src.display(), worker_src.display()); + copy_dir(&parent_src, &worker_src)?; + + // Overwrite lib.rs with our generated content + fs::write(worker_src.join("lib.rs"), content)?; + + // Copy and modify Cargo.toml + let parent_cargo = fs::read_to_string(parent_dir.join("Cargo.toml"))?; + let mut doc = parent_cargo.parse::()?; + + // Update package name to worker name + if let Some(package) = doc.get_mut("package") { + if let Some(name) = package.get_mut("name") { + *name = toml_edit::value(worker_name.as_str()); + } + } + + fs::write(worker_dir.join("Cargo.toml"), doc.to_string())?; + } + } + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +fn update_workspace_cargo_toml(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { + let cargo_toml_path = package_dir.join("Cargo.toml"); + let cargo_toml = fs::read_to_string(&cargo_toml_path)?; + + // Parse existing TOML + let mut doc = cargo_toml.parse::()?; + + // Get or create workspace section + let workspace = doc.entry("workspace").or_insert(toml_edit::table()); + + // Get or create members array + let members = workspace + .as_table_mut() + .ok_or_else(|| eyre!("workspace is not a table"))? 
+ .entry("members") + .or_insert(toml_edit::array()); + + let members_array = members + .as_array_mut() + .ok_or_else(|| eyre!("members is not an array"))?; + + // Add all worker packages + for workers in generated.processes.values() { + for worker_name in workers.keys() { + members_array.push(worker_name); + } + } + + // Write back to file + fs::write(cargo_toml_path, doc.to_string())?; + + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +pub fn copy_and_rewrite_package(package_dir: &Path) -> Result { + debug!("Rewriting for {}...", package_dir.display()); + let rewrite_dir = package_dir.join("target").join("rewrite"); + if rewrite_dir.exists() { + fs::remove_dir_all(&rewrite_dir)?; + } + fs::create_dir_all(&rewrite_dir)?; + + copy_dir(package_dir, &rewrite_dir)?; + + let mut generated = GeneratedProcesses::default(); + + // Process all Rust files in the copied directory + process_package(&rewrite_dir, &mut generated)?; + + // Create child processes + create_child_processes(&rewrite_dir, &generated)?; + + // Update workspace Cargo.toml + update_workspace_cargo_toml(&rewrite_dir, &generated)?; + + Ok(rewrite_dir) +} From 10a9440195994bce9b28006efa6b8bdf7492ec9b Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 20 Nov 2024 17:43:30 -0800 Subject: [PATCH 7/9] build: `rustfmt` the codegend files --- src/build/rewrite.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs index 34b36df..33c30a1 100644 --- a/src/build/rewrite.rs +++ b/src/build/rewrite.rs @@ -333,6 +333,8 @@ fn find_all_spawns(input: &str) -> Result, SpawnParseError> { Ok(results) } +/// Rewrites the parent and stores information +/// for writing children in GeneratedProcess. #[instrument(level = "trace", skip_all)] fn rewrite_rust_file( process_name: &str, @@ -409,6 +411,8 @@ fn rewrite_rust_file( Ok(new_content) } +/// For each process in package, rewrite rust files parents +/// and store information for writing children in GeneratedProcess. 
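+/// Rewritten files are written back in place and then formatted with `rustfmt`.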
#[instrument(level = "trace", skip_all)] fn process_package(package_dir: &Path, generated: &mut GeneratedProcesses) -> Result<()> { if !package_dir.is_dir() { @@ -436,6 +440,7 @@ fn process_package(package_dir: &Path, generated: &mut GeneratedProcesses) -> Re let content = fs::read_to_string(&path)?; let new_content = rewrite_rust_file(&process_name, &content, generated)?; fs::write(&path, new_content)?; + crate::build::run_command(std::process::Command::new("rustfmt").arg(&path), false)?; } } Ok(()) @@ -455,7 +460,12 @@ fn create_child_processes(package_dir: &Path, generated: &GeneratedProcesses) -> copy_dir(&parent_src, &worker_src)?; // Overwrite lib.rs with our generated content - fs::write(worker_src.join("lib.rs"), content)?; + let worker_lib = worker_src.join("lib.rs"); + fs::write(&worker_lib, content)?; + crate::build::run_command( + std::process::Command::new("rustfmt").arg(&worker_lib), + false, + )?; // Copy and modify Cargo.toml let parent_cargo = fs::read_to_string(parent_dir.join("Cargo.toml"))?; @@ -522,7 +532,7 @@ pub fn copy_and_rewrite_package(package_dir: &Path) -> Result { let mut generated = GeneratedProcesses::default(); - // Process all Rust files in the copied directory + // Rewrite parents & gather info for writing children process_package(&rewrite_dir, &mut generated)?; // Create child processes From 9bc0dcac164ffea74ddc09c6a4425d4891372248 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Thu, 21 Nov 2024 21:06:48 -0800 Subject: [PATCH 8/9] build: port functions to child when used; allow `Spawn!()` of function OR closure --- Cargo.lock | 1 + Cargo.toml | 4 +- src/build/rewrite.rs | 467 ++++++++++++++++++++++++++++++++++++++++--- 3 files changed, 446 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 055174b..5c872b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2168,6 +2168,7 @@ dependencies = [ "hex", "kinode_process_lib", "nix 0.27.1", + "proc-macro2", "regex", "reqwest", "rpassword", diff --git a/Cargo.toml b/Cargo.toml index 48a62ed..07ce7e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,6 +39,7 @@ fs-err = "2.11" hex = "0.4" kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib.git", rev = "9ac9e51" } nix = { version = "0.27", features = ["process", "signal", "term"] } +proc-macro2 = "1.0" regex = "1" reqwest = { version = "0.12", features = ["json"] } rpassword = "7" @@ -46,7 +47,8 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10.8" -syn = { version = "2.0", features = ["full"] } +syn = { version = "2.0", features = ["full", "visit", "extra-traits"] } +#syn = { version = "2.0", features = ["full", "visit"] } thiserror = "1.0" tokio = { version = "1.28", features = [ "macros", diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs index 33c30a1..e43c450 100644 --- a/src/build/rewrite.rs +++ b/src/build/rewrite.rs @@ -1,10 +1,14 @@ -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::path::{Path, PathBuf}; use color_eyre::{eyre::eyre, Result}; use fs_err as fs; use regex::Regex; -use syn::{__private::ToTokens, parse_str}; +use syn::{ + __private::ToTokens, + parse_str, + visit::{self, Visit}, +}; use toml_edit; use tracing::{debug, instrument}; @@ -43,16 +47,40 @@ impl From for GeneratedProcessesExternal { } } -#[derive(Debug)] +#[derive(Debug, Clone)] struct ArgInfo { name: String, ty: String, } +#[derive(Debug, Clone)] +struct ReturnInfo { + ty: String, +} + +#[derive(Debug, Clone)] +struct FnSignature { + args: Vec, + ret: Option, 
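+ // return type, if any; None when the function (or closure) returns ()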
+} + #[derive(Debug)] -struct SpawnMatch { - args: String, +struct FnInfo { + name: String, + signature: FnSignature, body: String, + dependencies: HashSet, +} + +#[derive(Debug)] +enum SpawnType { + Closure { args: String, body: String }, + FnCall { name: String, args: Vec }, +} + +#[derive(Debug)] +struct SpawnMatch { + spawn_type: SpawnType, imports: Vec, start_pos: usize, end_pos: usize, @@ -64,12 +92,26 @@ enum SpawnParseError { Imports, #[error("Spawn parse failed due to malformed closure: no closing pipe in closure")] NoClosingPipe, - #[error("Spawn parse failed due to malformed closure: no opening brace")] + #[error("Spawn parse failed due to malformed closure: no opening brace `{{`")] NoOpeningBrace, - #[error("Spawn parse failed due to malformed closure: unclosed brace")] + #[error("Spawn parse failed due to malformed closure: no opening paren `(`")] + NoOpeningParen, + #[error("Spawn parse failed due to malformed closure: no opening bracket `[`")] + NoOpeningBracket, + #[error("Spawn parse failed due to malformed closure: unclosed brace `{{`")] UnclosedBrace, - #[error("Spawn parse failed due to malformed closure: unclosed paren")] + #[error("Spawn parse failed due to malformed closure: unclosed paren `(`")] UnclosedParen, + #[error("Spawn parse failed due to malformed closure: unclosed bracket` `[`")] + UnclosedBracket, + #[error("Spawn parse failed: malformed function call")] + MalformedFunctionCall, + #[error("Spawn parse failed: no opening paren for arguments")] + UnclosedArgsParen, + #[error("Spawn parse failed: unclosed spawn paren")] + UnclosedSpawnParen, + #[error("Spawn parse failed: must start with `Spawn!(`")] + InvalidSpawnSyntax, } // TODO: factor out with build::mod.rs::copy_dir() @@ -106,12 +148,12 @@ fn make_args_struct_name(worker_name: &str) -> String { } #[instrument(level = "trace", skip_all)] -fn parse_fn_args(args: &str) -> Result> { +fn parse_fn_signature(args: &str) -> Result { // Parse the argument string as Rust function parameters let fn_item: syn::ItemFn = parse_str(&format!("fn dummy({args}) {{}}"))?; // Extract the parameters from the function signature - let params = fn_item + let args = fn_item .sig .inputs .into_iter() @@ -127,7 +169,15 @@ fn parse_fn_args(args: &str) -> Result> { }) .collect(); - Ok(params) + // Extract return type if present + let ret = match fn_item.sig.output { + syn::ReturnType::Default => None, + syn::ReturnType::Type(_, ty) => Some(ReturnInfo { + ty: ty.into_token_stream().to_string(), + }), + }; + + Ok(FnSignature { args, ret }) } fn generate_args_struct_type(struct_name: &str, args: &[ArgInfo]) -> String { @@ -220,7 +270,135 @@ fn extract_wit_bindgen(content: &str) -> Option { } } -fn parse_spawn_from(input: &str) -> Result<(String, String, usize), SpawnParseError> { +#[instrument(level = "trace", skip_all)] +fn extract_functions(content: &str) -> Result> { + let syntax_tree = syn::parse_file(content)?; + let mut functions = HashMap::new(); + + for item in syntax_tree.items { + if let syn::Item::Fn(func) = item { + let name = func.sig.ident.to_string(); + // Extract both args and return type + let signature = FnSignature { + args: func + .sig + .inputs + .iter() + .filter_map(|arg| { + if let syn::FnArg::Typed(pat_type) = arg { + Some(ArgInfo { + name: pat_type.pat.to_token_stream().to_string(), + ty: pat_type.ty.to_token_stream().to_string(), + }) + } else { + None + } + }) + .collect(), + ret: match &func.sig.output { + syn::ReturnType::Default => None, + syn::ReturnType::Type(_, ty) => Some(ReturnInfo { + ty: 
ty.into_token_stream().to_string(), + }), + }, + }; + + let mut deps = HashSet::new(); + find_fn_calls(&func.block, &mut deps); + + functions.insert( + name.clone(), + FnInfo { + name, + signature, + body: func.block.to_token_stream().to_string(), + dependencies: deps, + }, + ); + } + } + + Ok(functions) +} + +fn find_fn_calls(block: &syn::Block, deps: &mut HashSet) { + fn inspect_expr(expr: &syn::Expr, deps: &mut HashSet) { + match expr { + syn::Expr::Call(call) => { + // Check direct function call + if let syn::Expr::Path(path) = &*call.func { + if let Some(ident) = path.path.get_ident() { + deps.insert(ident.to_string()); + } + } + // Check arguments recursively + for arg in &call.args { + inspect_expr(arg, deps); + } + } + syn::Expr::Macro(mac) => { + // Convert tokens to string and look for function calls + let tokens = mac.mac.tokens.clone(); + let tokens_str = tokens.to_string(); + + // Split on comma and look at each part + for part in tokens_str.split(',') { + // Look for function call pattern: function_name(args) + if let Some(func_name) = part.trim().split('(').next() { + // Ignore format specifiers and other non-function tokens + if !func_name.contains('"') && !func_name.is_empty() { + deps.insert(func_name.trim().to_string()); + } + } + } + + // Still try to parse as expression for other cases + if let Ok(expr) = syn::parse2::(tokens) { + inspect_expr(&expr, deps); + } + } + syn::Expr::Block(block_expr) => { + for stmt in &block_expr.block.stmts { + inspect_stmt(stmt, deps); + } + } + _ => {} + } + } + + fn inspect_stmt(stmt: &syn::Stmt, deps: &mut HashSet) { + match stmt { + syn::Stmt::Expr(expr, _) => inspect_expr(expr, deps), + syn::Stmt::Local(local) => { + if let Some(init) = &local.init { + inspect_expr(&init.expr, deps); + } + } + syn::Stmt::Macro(mac_stmt) => { + if let Ok(expr) = syn::parse2::(mac_stmt.mac.tokens.clone()) { + inspect_expr(&expr, deps); + } else { + // Handle tokens directly for macro statements too + let tokens_str = mac_stmt.mac.tokens.to_string(); + for part in tokens_str.split(',') { + if let Some(func_name) = part.trim().split('(').next() { + if !func_name.contains('"') && !func_name.is_empty() { + deps.insert(func_name.trim().to_string()); + } + } + } + } + } + _ => {} + } + } + + for stmt in &block.stmts { + inspect_stmt(stmt, deps); + } +} + +fn parse_spawn_closure(input: &str) -> Result<(String, String, usize), SpawnParseError> { // Skip the "Spawn!(|" prefix since we know it's there let input_after_spawn = &input["Spawn!(|".len()..]; @@ -273,19 +451,217 @@ fn parse_spawn_from(input: &str) -> Result<(String, String, usize), SpawnParseEr Ok((args, body, total_consumed)) } +fn parse_spawn_fn_call(input: &str) -> Result<(String, Vec, usize), SpawnParseError> { + // Skip the "Spawn!(" prefix + let input_after_spawn = &input["Spawn!(".len()..]; + + // Find the function name (everything up to first '(' or whitespace) + let name_end = input_after_spawn + .find(|c: char| c == '(' || c.is_whitespace()) + .ok_or(SpawnParseError::MalformedFunctionCall)?; + let name = input_after_spawn[..name_end].trim().to_string(); + + // Find opening paren of args + let args_start = input_after_spawn[name_end..] + .find('(') + .ok_or(SpawnParseError::NoOpeningParen)? 
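+ // offset is relative to input_after_spawn[name_end..]; add name_end to make it absolute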
+ .saturating_add(name_end); + + // Find closing paren while handling nested parens + let mut paren_count = 1; + let mut args_end = None; + let mut closing_spawn_paren = None; + + for (i, c) in input_after_spawn[args_start + 1..].chars().enumerate() { + match c { + '(' => paren_count += 1, + ')' => { + paren_count -= 1; + if paren_count == 0 { + args_end = Some(args_start + 1 + i); + } else if paren_count == -1 { + // This is the closing paren of Spawn!(...) + closing_spawn_paren = Some(args_start + 1 + i); + break; + } + } + _ => {} + } + } + + let args_end = args_end.ok_or(SpawnParseError::UnclosedArgsParen)?; + let closing_spawn_paren = closing_spawn_paren.ok_or(SpawnParseError::UnclosedSpawnParen)?; + + // Parse args list by splitting on commas, handling nested stuff + let args_str = input_after_spawn[args_start + 1..args_end].trim(); + let args = split_args(args_str)?; + + // Return total consumed length including both closing parens + let total_consumed = "Spawn!(".len() + closing_spawn_paren + 1; + + Ok((name, args, total_consumed)) +} + +fn split_args(args: &str) -> Result, SpawnParseError> { + let mut result = Vec::new(); + let mut current = String::new(); + let mut paren_count = 0; + let mut brace_count = 0; + let mut bracket_count = 0; + + for c in args.chars() { + match c { + '(' => paren_count += 1, + ')' => paren_count -= 1, + '{' => brace_count += 1, + '}' => brace_count -= 1, + '[' => bracket_count += 1, + ']' => bracket_count -= 1, + ',' if paren_count == 0 && brace_count == 0 && bracket_count == 0 => { + result.push(current.trim().to_string()); + current = String::new(); + continue; + } + _ => {} + } + current.push(c); + } + + if !current.is_empty() { + result.push(current.trim().to_string()); + } + + if paren_count != 0 { + return Err(SpawnParseError::UnclosedParen); + } + if brace_count != 0 { + return Err(SpawnParseError::UnclosedBrace); + } + if bracket_count != 0 { + return Err(SpawnParseError::UnclosedBracket); + } + + Ok(result) +} + +fn parse_spawn_from(input: &str) -> Result<(SpawnType, usize), SpawnParseError> { + if input.starts_with("Spawn!(|") { + // Existing closure parsing logic + let (args, body, consumed) = parse_spawn_closure(&input)?; + Ok((SpawnType::Closure { args, body }, consumed)) + } else if input.starts_with("Spawn!(") { + // Function call parsing logic + debug!("parsing non-closure `Spawn!(`"); + let (name, args, consumed) = parse_spawn_fn_call(&input)?; + Ok((SpawnType::FnCall { name, args }, consumed)) + } else { + Err(SpawnParseError::InvalidSpawnSyntax) + } +} + +fn add_function_and_deps( + name: &str, + functions: &HashMap, + needed: &mut HashSet, +) { + needed.insert(name.to_string()); + if let Some(info) = functions.get(name) { + for dep in &info.dependencies { + add_function_and_deps(dep, functions, needed); + } + } +} + #[instrument(level = "trace", skip_all)] fn generate_worker_process( process_name: &str, - body: &str, + spawn_match: &SpawnMatch, + functions: &HashMap, imports: &[String], wit_bindgen: &str, args_type: &str, args_destructure: &str, ) -> Result { + let mut needed_fns = HashSet::new(); + + // Get return type if it's a function call + let return_type = match &spawn_match.spawn_type { + SpawnType::FnCall { name, .. } => { + if let Some(fn_info) = functions.get(name) { + fn_info.signature.ret.clone() + } else { + None + } + } + SpawnType::Closure { .. 
} => None, // Closures don't have return types in our context + }; + + // Get list of functions we need to copy + match &spawn_match.spawn_type { + SpawnType::Closure { body, .. } => { + // Parse body to find function calls + // Add braces back before parsing + let block_str = format!("{{{body}}}"); + let syntax_tree = syn::parse_str::(&block_str)?; + // First find direct function calls in the closure + find_fn_calls(&syntax_tree, &mut needed_fns); + debug!("generate_worker_process find_fn_calls needed_fns {needed_fns:?}"); + debug!("{:?}", functions.keys().collect::>()); + // Then recursively add dependencies for each function found + let direct_deps = needed_fns.clone(); // Clone before recursive traversal + debug!("{direct_deps:?}"); + for name in direct_deps { + add_function_and_deps(&name, functions, &mut needed_fns); + } + } + SpawnType::FnCall { name, .. } => { + // Add the called function and its dependencies + debug!("fncall {name}"); + debug!("{:?}", functions.keys().collect::>()); + add_function_and_deps(name, functions, &mut needed_fns); + } + } + debug!("generate_worker_process found deps: {needed_fns:?}"); + let imports = imports .iter() .map(|i| format!("#[allow(unused_imports)]\nuse {i};\n")) .collect::(); + + // Generate function definitions preserving return types + let function_definitions = needed_fns + .iter() + .filter_map(|name| functions.get(name)) + .map(|info| { + let ret_type = info + .signature + .ret + .as_ref() + .map_or("".to_string(), |r| format!(" -> {}", r.ty)); + format!( + "fn {}({}){}{}", + info.name, + info.signature + .args + .iter() + .map(|arg| format!("{}: {}", arg.name, arg.ty)) + .collect::>() + .join(", "), + ret_type, + info.body, + ) + }) + .collect::(); + + debug!("{function_definitions}"); + + let body = match &spawn_match.spawn_type { + SpawnType::Closure { body, .. 
} => format!("{body};"), + SpawnType::FnCall { name, args } => { + format!("{name}({});", args.join(", ")) + } + }; + let template = format!( r#"// Generated worker process for {process_name} {imports} @@ -294,6 +670,8 @@ fn generate_worker_process( {args_type} +{function_definitions} + call_init!(init); fn init(our: Address) {{ // Get args from parent @@ -314,14 +692,13 @@ fn find_all_spawns(input: &str) -> Result, SpawnParseError> { let mut search_from = 0; let imports = extract_imports(input)?; - while let Some(spawn_start) = input[search_from..].find("Spawn!(|") { + while let Some(spawn_start) = input[search_from..].find("Spawn!(") { let absolute_start = search_from + spawn_start; - let (args, body, consumed_len) = parse_spawn_from(&input[absolute_start..])?; + let (spawn_type, consumed_len) = parse_spawn_from(&input[absolute_start..])?; results.push(SpawnMatch { - args, - body, + spawn_type, imports: imports.clone(), start_pos: absolute_start, end_pos: absolute_start + consumed_len, @@ -343,21 +720,36 @@ fn rewrite_rust_file( ) -> Result { let spawn_matches = find_all_spawns(content)?; let mut new_content = content.to_string(); + let functions = extract_functions(&content)?; + debug!("got functions in {process_name}: {:#?}", functions); // Process spawns in reverse order to not invalidate positions for (i, spawn_match) in spawn_matches.iter().enumerate().rev() { let worker_name = format!("{process_name}-worker-{i}"); let wasm_name = format!("{worker_name}.wasm"); - let args_name = make_args_struct_name(&worker_name); - let parsed_args = parse_fn_args(&spawn_match.args)?; - let args_type = generate_args_struct_type(&args_name, &parsed_args); - let args_instance = generate_args_struct_instance(&args_name, &parsed_args); - let args_destructure = generate_args_struct_destructure(&args_name, &parsed_args); + let (args_name, parsed_signature) = match &spawn_match.spawn_type { + SpawnType::Closure { args, .. } => { + let args_name = make_args_struct_name(&worker_name); + let parsed_signature = parse_fn_signature(args)?; + (args_name, parsed_signature) + } + SpawnType::FnCall { name, args } => { + let fn_info = functions + .get(name) + .ok_or_else(|| eyre!("Function {name} not found in parent"))?; + // For function calls, use the function's argument names but + // with the values supplied in the Spawn!() call + let args_name = make_args_struct_name(&worker_name); + (args_name, fn_info.signature.clone()) + } + }; + + let args_type = generate_args_struct_type(&args_name, &parsed_signature.args); + let args_destructure = generate_args_struct_destructure(&args_name, &parsed_signature.args); // Generate worker process let wit_bindgen = extract_wit_bindgen(content).unwrap_or_else(|| { - // Fallback to default if not found r#"wit_bindgen::generate!({ path: "target/wit", world: "process-v0", @@ -367,7 +759,8 @@ fn rewrite_rust_file( let worker_code = generate_worker_process( process_name, - &spawn_match.body, + spawn_match, + &functions, &spawn_match.imports, &wit_bindgen, &args_type, @@ -381,7 +774,31 @@ fn rewrite_rust_file( .or_default() .insert(worker_name.clone(), (wasm_name, worker_code)); - // Create replacement spawn code + // Create replacement spawn code with appropriate args instantiation + let args_instance = match &spawn_match.spawn_type { + SpawnType::Closure { args, .. } => { + // For closures, use the argument names directly + generate_args_struct_instance(&args_name, &parsed_signature.args) + } + SpawnType::FnCall { args, .. 
} => { + // For function calls, use the supplied argument values + let fields = parsed_signature + .args + .iter() + .zip(args.iter()) + .map(|(arg, value)| format!(" {}: {},", arg.name, value)) + .collect::>() + .join("\n"); + + format!( + r#"let args = {args_name} {{ +{fields} + }};"# + ) + } + }; + + // Create the replacement code let replacement = format!( r#"{{ use kinode_process_lib::{{spawn, OnExit, Request}}; From cfc7b506de01ca1d46b10fe1bb2d79e159c25838 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 22 Nov 2024 20:38:17 -0800 Subject: [PATCH 9/9] build: change default to NOT rewrite --- src/build/mod.rs | 20 ++++++++++---------- src/build_start_package/mod.rs | 4 ++-- src/main.rs | 18 +++++++++--------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/build/mod.rs b/src/build/mod.rs index 5330cc5..c24c3c6 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -1158,7 +1158,7 @@ async fn fetch_dependencies( default_world: Option<&str>, include: &HashSet, exclude: &HashSet, - no_rewrite: bool, + rewrite: bool, force: bool, verbose: bool, ) -> Result<()> { @@ -1175,7 +1175,7 @@ async fn fetch_dependencies( default_world, vec![], // TODO: what about deps-of-deps? vec![], - no_rewrite, + rewrite, false, force, verbose, @@ -1212,7 +1212,7 @@ async fn fetch_dependencies( default_world, local_dep_deps, vec![], - no_rewrite, + rewrite, false, force, verbose, @@ -1528,7 +1528,7 @@ async fn compile_package( add_paths_to_api: &Vec, include: &HashSet, exclude: &HashSet, - no_rewrite: bool, + rewrite: bool, force: bool, verbose: bool, ignore_deps: bool, // for internal use; may cause problems when adding recursive deps @@ -1551,7 +1551,7 @@ async fn compile_package( default_world, include, exclude, - no_rewrite, + rewrite, force, verbose, ) @@ -1659,7 +1659,7 @@ pub async fn execute( default_world: Option<&str>, local_dependencies: Vec, add_paths_to_api: Vec, - no_rewrite: bool, + rewrite: bool, reproducible: bool, force: bool, verbose: bool, @@ -1744,9 +1744,9 @@ pub async fn execute( check_process_lib_version(&package_dir.join("Cargo.toml"))?; // live_dir is the "dir that is being built" or is "live"; - // if `no_rewrite`, that is just `package_dir`; + // if `!rewrite`, that is just `package_dir`; // else, it is the modified copy that is in `target/rewrite/` - let live_dir = if no_rewrite { + let live_dir = if !rewrite { PathBuf::from(package_dir) } else { copy_and_rewrite_package(package_dir)? 
@@ -1777,7 +1777,7 @@ pub async fn execute( &add_paths_to_api, &include, &exclude, - no_rewrite, + rewrite, force, verbose, ignore_deps, @@ -1785,7 +1785,7 @@ pub async fn execute( .await?; } - if !no_rewrite { + if rewrite { if package_dir.join("pkg").exists() { fs::remove_dir_all(package_dir.join("pkg"))?; } diff --git a/src/build_start_package/mod.rs b/src/build_start_package/mod.rs index 36eec64..d370719 100644 --- a/src/build_start_package/mod.rs +++ b/src/build_start_package/mod.rs @@ -21,7 +21,7 @@ pub async fn execute( default_world: Option<&str>, local_dependencies: Vec, add_paths_to_api: Vec, - no_rewrite: bool, + rewrite: bool, reproducible: bool, force: bool, verbose: bool, @@ -39,7 +39,7 @@ pub async fn execute( default_world, local_dependencies, add_paths_to_api, - no_rewrite, + rewrite, reproducible, force, verbose, diff --git a/src/main.rs b/src/main.rs index c5d1892..f148e74 100644 --- a/src/main.rs +++ b/src/main.rs @@ -222,7 +222,7 @@ async fn execute( .unwrap_or_default() .map(|s| PathBuf::from(s)) .collect(); - let no_rewrite = matches.get_one::("NO_REWRITE").unwrap(); + let rewrite = matches.get_one::("REWRITE").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -240,7 +240,7 @@ async fn execute( default_world.map(|w| w.as_str()), local_dependencies, add_paths_to_api, - *no_rewrite, + *rewrite, *reproducible, *force, *verbose, @@ -285,7 +285,7 @@ async fn execute( .unwrap_or_default() .map(|s| PathBuf::from(s)) .collect(); - let no_rewrite = matches.get_one::("NO_REWRITE").unwrap(); + let rewrite = matches.get_one::("REWRITE").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -303,7 +303,7 @@ async fn execute( default_world.map(|w| w.as_str()), local_dependencies, add_paths_to_api, - *no_rewrite, + *rewrite, *reproducible, *force, *verbose, @@ -737,10 +737,10 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .long("add-to-api") .help("Path to file to add to api.zip (can specify multiple times)") ) - .arg(Arg::new("NO_REWRITE") + .arg(Arg::new("REWRITE") .action(ArgAction::SetTrue) - .long("no-rewrite") - .help("Don't rewrite the package (disables `Spawn!()`) [default: rewrite]") + .long("rewrite") + .help("Rewrite the package (disables `Spawn!()`) [default: don't rewrite]") .required(false) ) .arg(Arg::new("REPRODUCIBLE") @@ -844,10 +844,10 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .help("Pass these comma-delimited feature flags to Rust cargo builds") .required(false) ) - .arg(Arg::new("NO_REWRITE") + .arg(Arg::new("REWRITE") .action(ArgAction::SetTrue) .long("no-rewrite") - .help("Don't rewrite the package (disables `Spawn!()`) [default: rewrite]") + .help("Rewrite the package (disables `Spawn!()`) [default: don't rewrite]") .required(false) ) .arg(Arg::new("REPRODUCIBLE")