From d0b0bdc08f10bbcaad72d2d5c274999bb89b2b2f Mon Sep 17 00:00:00 2001 From: Frank Bell Date: Thu, 20 Jun 2024 01:22:46 +0100 Subject: [PATCH] refactor: improve implementation and test coverage --- Cargo.lock | 7 + Cargo.toml | 1 + crates/pop-cli/src/commands/new/parachain.rs | 9 +- crates/pop-cli/src/commands/up/parachain.rs | 219 +- crates/pop-parachains/Cargo.toml | 1 + crates/pop-parachains/src/lib.rs | 3 +- crates/pop-parachains/src/templates.rs | 4 +- crates/pop-parachains/src/up.rs | 1657 -------------- crates/pop-parachains/src/up/mod.rs | 2099 ++++++++++++++++++ crates/pop-parachains/src/up/parachains.rs | 212 ++ crates/pop-parachains/src/up/relay.rs | 159 ++ crates/pop-parachains/src/up/sourcing.rs | 894 ++++++++ crates/pop-parachains/src/utils/git.rs | 56 +- 13 files changed, 3537 insertions(+), 1784 deletions(-) delete mode 100644 crates/pop-parachains/src/up.rs create mode 100644 crates/pop-parachains/src/up/mod.rs create mode 100644 crates/pop-parachains/src/up/parachains.rs create mode 100644 crates/pop-parachains/src/up/relay.rs create mode 100644 crates/pop-parachains/src/up/sourcing.rs diff --git a/Cargo.lock b/Cargo.lock index b1515d57a..f554ee812 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2888,6 +2888,12 @@ dependencies = [ "regex", ] +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + [[package]] name = "group" version = "0.13.0" @@ -5327,6 +5333,7 @@ dependencies = [ "flate2", "git2", "git2_credentials", + "glob", "indexmap 2.2.6", "mockito", "regex", diff --git a/Cargo.toml b/Cargo.toml index 02c0607cb..8260c9d43 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ duct = "0.13" env_logger = "0.11.1" flate2 = "1.0.30" git2 = { version = "0.18", features = ["vendored-openssl"] } +glob = "0.3.1" log = "0.4.20" mockito = "1.4.0" predicates = "3.1.0" diff --git a/crates/pop-cli/src/commands/new/parachain.rs b/crates/pop-cli/src/commands/new/parachain.rs index df710a1d9..8739519bf 100644 --- a/crates/pop-cli/src/commands/new/parachain.rs +++ b/crates/pop-cli/src/commands/new/parachain.rs @@ -321,13 +321,8 @@ async fn choose_release(template: &Template) -> Result> { } async fn get_latest_3_releases(repo: &GitHub) -> Result> { - let mut latest_3_releases: Vec = repo - .get_latest_releases() - .await? - .into_iter() - .filter(|r| !r.prerelease) - .take(3) - .collect(); + let mut latest_3_releases: Vec = + repo.releases().await?.into_iter().filter(|r| !r.prerelease).take(3).collect(); repo.get_repo_license().await?; // Get the commit sha for the releases for release in latest_3_releases.iter_mut() { diff --git a/crates/pop-cli/src/commands/up/parachain.rs b/crates/pop-cli/src/commands/up/parachain.rs index 5e0301892..53d78360d 100644 --- a/crates/pop-cli/src/commands/up/parachain.rs +++ b/crates/pop-cli/src/commands/up/parachain.rs @@ -1,5 +1,4 @@ // SPDX-License-Identifier: GPL-3.0 - use crate::style::{style, Theme}; use clap::Args; use cliclack::{ @@ -8,8 +7,8 @@ use cliclack::{ }; use console::{Emoji, Style, Term}; use duct::cmd; -use pop_parachains::{Binary, Error, NetworkNode, Source, Status, Zombienet}; -use std::{fs::remove_dir_all, path::PathBuf, time::Duration}; +use pop_parachains::{Error, IndexSet, NetworkNode, Status, Zombienet}; +use std::{path::PathBuf, time::Duration}; use tokio::time::sleep; #[derive(Args)] @@ -25,8 +24,8 @@ pub(crate) struct ZombienetCommand { /// "v1.11.0"). 
Defaults to the relay chain version if not specified. #[arg(short, long)] system_parachain: Option, - /// The url of the git repository of a parachain to be used, with branch/release tag specified as #fragment (e.g. 'https://github.com/org/repository#tag'). - /// A specific binary name can also be optionally specified via query string parameter (e.g. 'https://github.com/org/repository?binaryname#tag'), defaulting to the name of the repository when not specified. + /// The url of the git repository of a parachain to be used, with branch/release tag/commit specified as #fragment (e.g. 'https://github.com/org/repository#ref'). + /// A specific binary name can also be optionally specified via query string parameter (e.g. 'https://github.com/org/repository?binaryname#ref'), defaulting to the name of the repository when not specified. #[arg(short, long)] parachain: Option>, /// The command to run after the network has been launched. @@ -45,10 +44,10 @@ impl ZombienetCommand { // Parse arguments let cache = crate::cache()?; let mut zombienet = match Zombienet::new( - cache.clone(), + &cache, &self.file, - self.relay_chain.as_ref(), - self.system_parachain.as_ref(), + self.relay_chain.as_ref().map(|v| v.as_str()), + self.system_parachain.as_ref().map(|v| v.as_str()), self.parachain.as_ref(), ) .await @@ -65,30 +64,14 @@ impl ZombienetCommand { }, }; - // Check if any missing binaries - let missing: Vec<_> = zombienet - .missing_binaries() - .into_iter() - .filter_map(|b| match &b.source { - Source::None | Source::Artifact => None, - _ => Some(b), - }) - .collect(); - if !missing.is_empty() { - let list = style(format!( - "> {}", - missing.iter().map(|b| b.name.clone()).collect::>().join(", ") - )) - .dim() - .to_string(); - log::warning(format!("⚠ī¸ The following binaries specified in the network configuration file cannot be found locally:\n {list}"))?; - Self::source_binaries(missing, &cache, self.verbose).await?; + // Source any missing/stale binaries + if Self::source_binaries(&mut zombienet, &cache, self.verbose).await? { + return Ok(()); } // Finally spawn network and wait for signal to terminate let spinner = cliclack::spinner(); spinner.start("🚀 Launching local network..."); - //tracing_subscriber::fmt().init(); match zombienet.spawn().await { Ok(network) => { let mut result = @@ -155,106 +138,165 @@ impl ZombienetCommand { } async fn source_binaries( - missing: Vec<&Binary>, + zombienet: &mut Zombienet, cache: &PathBuf, verbose: bool, - ) -> anyhow::Result<()> { - // Prompt for automatic sourcing of binaries - let list = style(format!( - "> {}", - missing - .iter() - .map(|binary| { - let version = binary.version(); - if version != "" { - format!("{} {}", binary.name, binary.version()) - } else { - binary.name.clone() - } - }) - .collect::>() - .join(", ") - )) - .dim() - .to_string(); - if !confirm(format!( - "đŸ“Ļ Would you like to source them automatically now? It may take some time...\n {list}")) - .initial_value(true) - .interact()? 
- { - outro_cancel( - "đŸšĢ Cannot launch the specified network until all required binaries are available.", - )?; - return Ok(()); + ) -> anyhow::Result { + // Check for any missing or stale binaries + let binaries: Vec<_> = zombienet.binaries().filter(|b| !b.exists() || b.stale()).collect(); + if binaries.is_empty() { + return Ok(false); } - log::info(format!( - "ℹī¸ Binaries will be cached at {}", - &cache.to_str().expect("expected local cache is invalid") - ))?; + // Check if any missing binaries + let missing: IndexSet<_> = binaries + .iter() + .filter_map(|b| (!b.exists()).then_some((b.name(), b.version()))) + .collect(); + if !missing.is_empty() { + let list = style(format!( + "> {}", + missing.iter().map(|(name, _)| name.to_string()).collect::>().join(", ") + )) + .dim() + .to_string(); + log::warning(format!("⚠ī¸ The following binaries specified in the network configuration file cannot be found locally:\n {list}"))?; - // Check for pre-existing working directory - let working_dir = cache.join(".src"); - if working_dir.exists() && confirm( - "đŸ“Ļ A previous working directory has been detected. Would you like to remove it now?", - ) + // Prompt for automatic sourcing of binaries + let list = style(format!( + "> {}", + missing + .iter() + .map(|(name, version)| { + if let Some(version) = version { + format!("{name} {version}") + } else { + name.to_string() + } + }) + .collect::>() + .join(", ") + )) + .dim() + .to_string(); + if !confirm(format!( + "đŸ“Ļ Would you like to source them automatically now? It may take some time...\n {list}")) .initial_value(true) - .interact()? { - remove_dir_all(&working_dir)?; + .interact()? + { + outro_cancel( + "đŸšĢ Cannot launch the specified network until all required binaries are available.", + )?; + return Ok(true); + } + } + + // Check if any stale binaries + let stale: IndexSet<_> = binaries + .iter() + .filter_map(|b| (b.stale()).then_some((b.name(), b.version(), b.latest()))) + .collect(); + let mut latest = false; + if !stale.is_empty() { + let list = style(format!( + "> {}", + stale + .iter() + .map(|(name, version, latest)| { + format!( + "{name} {} -> {}", + version.unwrap_or("None"), + latest.unwrap_or("None") + ) + }) + .collect::>() + .join(", ") + )) + .dim() + .to_string(); + log::warning(format!( + "ℹī¸ The following binaries have newer versions available:\n {list}" + ))?; + + latest = confirm( + "đŸ“Ļ Would you like to source them automatically now? It may take some time..." 
+ .to_string(), + ) + .initial_value(true) + .interact()?; + } + + let binaries: Vec<_> = binaries + .into_iter() + .filter(|b| !b.exists() || (latest && b.stale())) + .map(|b| { + if latest && b.stale() { + b.use_latest() + } + b + }) + .collect(); + + if binaries.is_empty() { + return Ok(false); + } + + if binaries.iter().any(|b| !b.local()) { + log::info(format!( + "ℹī¸ Binaries will be cached at {}", + &cache.to_str().expect("expected local cache is invalid") + ))?; } // Source binaries + let release = true; match verbose { true => { let reporter = VerboseReporter; - for binary in missing { - log::info(format!("đŸ“Ļ Sourcing {}...", binary.name))?; + for binary in binaries { + log::info(format!("đŸ“Ļ Sourcing {}...", binary.name()))?; Term::stderr().clear_last_lines(1)?; - if let Err(e) = binary.source(&working_dir, reporter, verbose).await { + if let Err(e) = binary.source(release, &reporter, verbose).await { reporter.update(&format!("Sourcing failed: {e}")); outro_cancel( "đŸšĢ Cannot launch the network until all required binaries are available.", )?; - return Ok(()); + return Ok(true); } } reporter.update(""); }, false => { let multi = multi_progress("đŸ“Ļ Sourcing binaries...".to_string()); - let queue: Vec<_> = missing - .iter() + let queue: Vec<_> = binaries + .into_iter() .map(|binary| { let progress = multi.add(cliclack::spinner()); - progress.start(format!("{}: waiting...", binary.name)); + progress.start(format!("{}: waiting...", binary.name())); (binary, progress) }) .collect(); let mut error = false; - for (binary, progress) in &queue { - let prefix = format!("{}: ", binary.name); - let progress_reporter = ProgressReporter(&prefix, &progress); - if let Err(e) = binary.source(&working_dir, progress_reporter, verbose).await { - progress.error(format!("đŸšĢ {}: {e}", binary.name)); + for (binary, progress) in queue { + let prefix = format!("{}: ", binary.name()); + let progress_reporter = ProgressReporter(prefix, progress); + if let Err(e) = binary.source(release, &progress_reporter, verbose).await { + progress_reporter.1.error(format!("đŸšĢ {}: {e}", binary.name())); error = true; } - progress.stop(format!("✅ {}", binary.name)); + progress_reporter.1.stop(format!("✅ {}", binary.name())); } multi.stop(); if error { outro_cancel( "đŸšĢ Cannot launch the network until all required binaries are available.", )?; - return Ok(()); + return Ok(true); } }, }; - // Remove working directory once completed successfully - if working_dir.exists() { - remove_dir_all(working_dir)? - } - return Ok(()); + return Ok(false); } } @@ -278,10 +320,9 @@ pub(crate) async fn run_custom_command( } /// Reports any observed status updates to a progress bar. 
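// Illustrative sketch (not part of this patch): the selection logic in
// `source_binaries` above amounts to partitioning the required binaries into
// "missing" (not on disk yet) and "stale" (a newer version is available),
// using the `Binary::exists` and `Binary::stale` accessors introduced in this
// diff. A simplified, standalone form of that partition, assuming owned
// `Binary` values (hypothetical helper):
fn partition_binaries(binaries: Vec<Binary>) -> (Vec<Binary>, Vec<Binary>) {
    // Anything already present and up to date is dropped; the rest is split
    // into (missing, stale-but-present).
    binaries
        .into_iter()
        .filter(|b| !b.exists() || b.stale())
        .partition(|b| !b.exists())
}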
-#[derive(Copy, Clone)] -struct ProgressReporter<'a>(&'a str, &'a ProgressBar); +struct ProgressReporter(String, ProgressBar); -impl Status for ProgressReporter<'_> { +impl Status for ProgressReporter { fn update(&self, status: &str) { self.1 .start(&format!("{}{}", self.0, status.replace(" Compiling", "Compiling"))) diff --git a/crates/pop-parachains/Cargo.toml b/crates/pop-parachains/Cargo.toml index ea61e0198..145072c67 100644 --- a/crates/pop-parachains/Cargo.toml +++ b/crates/pop-parachains/Cargo.toml @@ -13,6 +13,7 @@ duct.workspace = true flate2.workspace = true git2.workspace = true git2_credentials.workspace = true +glob.workspace = true strum.workspace = true strum_macros.workspace = true tar.workspace = true diff --git a/crates/pop-parachains/src/lib.rs b/crates/pop-parachains/src/lib.rs index 42048419f..a6cc00285 100644 --- a/crates/pop-parachains/src/lib.rs +++ b/crates/pop-parachains/src/lib.rs @@ -11,10 +11,11 @@ mod utils; pub use build::build_parachain; pub use errors::Error; +pub use indexmap::IndexSet; pub use new_pallet::{create_pallet_template, TemplatePalletConfig}; pub use new_parachain::instantiate_template_dir; pub use templates::{Config, Provider, Template}; -pub use up::{Binary, Source, Status, Zombienet}; +pub use up::{Binary, Status, Zombienet}; pub use utils::git::{Git, GitHub, Release}; pub use utils::helpers::is_initial_endowment_valid; pub use utils::pallet_helpers::resolve_pallet_path; diff --git a/crates/pop-parachains/src/templates.rs b/crates/pop-parachains/src/templates.rs index c0c829c8c..da49c3000 100644 --- a/crates/pop-parachains/src/templates.rs +++ b/crates/pop-parachains/src/templates.rs @@ -1,7 +1,5 @@ // SPDX-License-Identifier: GPL-3.0 -use strum::{ - EnumMessage as EnumMessageT, EnumProperty as EnumPropertyT, VariantArray as VariantArrayT, -}; +use strum::{EnumMessage as _, EnumProperty as _, VariantArray as _}; use strum_macros::{AsRefStr, Display, EnumMessage, EnumProperty, EnumString, VariantArray}; use thiserror::Error; diff --git a/crates/pop-parachains/src/up.rs b/crates/pop-parachains/src/up.rs deleted file mode 100644 index f602b2c67..000000000 --- a/crates/pop-parachains/src/up.rs +++ /dev/null @@ -1,1657 +0,0 @@ -// SPDX-License-Identifier: GPL-3.0 -use crate::{ - errors::Error, - utils::git::{Git, GitHub}, - APP_USER_AGENT, -}; -use duct::cmd; -use flate2::read::GzDecoder; -use indexmap::IndexMap; -use std::{ - fmt::Debug, - fs::{copy, create_dir_all, metadata, read_dir, rename, write, File}, - io::{BufRead, Seek, SeekFrom, Write}, - iter::once, - os::unix::fs::PermissionsExt, - path::{Path, PathBuf}, -}; -use symlink::{remove_symlink_file, symlink_file}; -use tar::Archive; -use tempfile::{tempdir, tempfile, Builder, NamedTempFile}; -use toml_edit::{value, ArrayOfTables, DocumentMut, Formatted, Item, Table, Value}; -use url::Url; -use zombienet_sdk::{Network, NetworkConfig, NetworkConfigExt}; -use zombienet_support::fs::local::LocalFileSystem; - -const POLKADOT: &str = "https://github.com/r0gue-io/polkadot"; -const POLKADOT_DEFAULT_VERSION: &str = "v1.12.0"; -const POP: &str = "https://github.com/r0gue-io/pop-node"; -const POP_DEFAULT_VERSION: &str = "v0.1.0-alpha2"; - -/// Configuration to launch a local network. -pub struct Zombienet { - /// The cache location, used for caching binaries. - cache: PathBuf, - /// The config to be used to launch a network. - network_config: NetworkConfiguration, - /// The configuration required to launch the relay chain. 
- relay_chain: RelayChain, - /// The configuration required to launch parachains. - parachains: IndexMap, -} - -impl Zombienet { - /// Initialises the configuration for launching a local network. - /// # Arguments - /// - /// * `cache` - location, used for caching binaries - /// * `network_config` - config file to be used to launch a network. - /// * `relay_chain_version` - the specific version used for the relay chain (none will fetch the last one). - /// * `system_parachain_version` - the specific version used for the system chain (none will fetch the last one). - /// * `parachains` - list of parachains url. - pub async fn new( - cache: PathBuf, - network_config: &str, - relay_chain_version: Option<&String>, - system_parachain_version: Option<&String>, - parachains: Option<&Vec>, - ) -> Result { - // Parse network config - let network_config = NetworkConfiguration::from(network_config)?; - // Determine relay and parachain requirements based on arguments and config - let relay_chain = Self::relay_chain(relay_chain_version, &network_config, &cache).await?; - let parachains = Self::parachains( - system_parachain_version.unwrap_or(&relay_chain.binary.version), - parachains, - &network_config, - &cache, - ) - .await?; - Ok(Self { cache, network_config, relay_chain, parachains }) - } - - /// Determines whether any binaries are missing. - pub fn missing_binaries(&self) -> Vec<&Binary> { - let mut missing = Vec::new(); - if !self.relay_chain.binary.path.exists() { - missing.push(&self.relay_chain.binary); - } - for parachain in self.parachains.values().filter(|p| !p.binary.path.exists()) { - missing.push(¶chain.binary); - } - missing - } - - /// Launches the local network. - pub async fn spawn(&mut self) -> Result, Error> { - // Symlink polkadot workers - for worker in &self.relay_chain.workers { - let dest = self.cache.join(&worker.name); - if dest.exists() { - remove_symlink_file(&dest)?; - } - symlink_file(&worker.path, dest)?; - } - - // Load from config and spawn network - let config = self.configure()?; - let path = config.path().to_str().expect("temp config file should have a path").into(); - let network_config = NetworkConfig::load_from_toml(path)?; - Ok(network_config.spawn_native().await?) - } - - // Determine relay chain requirements based on specified version and config - async fn relay_chain( - version: Option<&String>, - network_config: &NetworkConfiguration, - cache: &PathBuf, - ) -> Result { - // Validate config - let relay_chain = network_config.relay_chain()?; - if let Some(command) = - NetworkConfiguration::default_command(relay_chain).and_then(|c| c.as_str()) - { - if command.to_lowercase() != RelayChain::BINARY { - return Err(Error::UnsupportedCommand(format!( - "the relay chain command is unsupported: {command}", - ))); - } - } - if let Some(nodes) = NetworkConfiguration::nodes(relay_chain) { - for node in nodes { - if let Some(command) = NetworkConfiguration::command(node).and_then(|c| c.as_str()) - { - if command.to_lowercase() != RelayChain::BINARY { - return Err(Error::UnsupportedCommand(format!( - "the relay chain command is unsupported: {command}", - ))); - } - } - } - } - - // Default to latest version when none specified - let version = match version { - Some(v) => v.to_string(), - None => Self::latest_polkadot_release().await?, - }; - Ok(RelayChain::new(version, cache)?) 
- } - - // Determine parachain requirements based on specified version and config - async fn parachains( - system_parachain_version: &str, - parachains: Option<&Vec>, - network_config: &NetworkConfiguration, - cache: &PathBuf, - ) -> Result, Error> { - let Some(tables) = network_config.parachains() else { - return Ok(IndexMap::default()); - }; - - let mut paras = IndexMap::new(); - 'outer: for table in tables.iter() { - let id = table - .get("id") - .and_then(|i| i.as_integer()) - .ok_or(Error::Config("expected `parachain` to have `id`".into()))? as u32; - - let default_command = NetworkConfiguration::default_command(table) - .cloned() - .or_else(|| { - // Check if any collators define command - if let Some(collators) = - table.get("collators").and_then(|p| p.as_array_of_tables()) - { - for collator in collators.iter() { - if let Some(command) = - NetworkConfiguration::command(collator).and_then(|i| i.as_str()) - { - return Some(Item::Value(Value::String(Formatted::new( - command.into(), - )))); - } - } - } - - // Otherwise default to polkadot-parachain - Some(Item::Value(Value::String(Formatted::new("polkadot-parachain".into())))) - }) - .expect("missing default_command set above"); - let Some(command) = default_command.as_str() else { - continue; - }; - let command = command.to_lowercase(); - - // Check if system parachain - if command == Parachain::SYSTEM_CHAIN_BINARY { - paras - .insert(id, Parachain::system_parachain(id, system_parachain_version, &cache)?); - continue; - } - - // Check if pop-node - if command == Parachain::POP_BINARY { - paras.insert(id, Parachain::pop(id, &Self::latest_pop_release().await?, &cache)?); - continue; - } - - // Check if parachain binary source specified as an argument - if let Some(parachains) = parachains { - for parachain in parachains { - let repo = Repository::parse(parachain)?; - if command != repo.package { - continue; - } - - // Check for GitHub repository to be able to download source as an archive - let github = - repo.url.host_str().is_some_and(|h| h.to_lowercase() == "github.com"); - let para = if github { - let github = GitHub::parse(parachain)?; - Parachain::from_github_archive( - id, - github, - repo.reference, - repo.package, - &cache, - )? - } else { - Parachain::from_git(id, repo.url, repo.reference, repo.package, &cache)? 
- }; - - paras.insert(id, para); - continue 'outer; - } - } - - // Check if command references a local binary - if ["./", "../", "/"].iter().any(|p| command.starts_with(p)) { - paras.insert(id, Parachain::from_local(id, command.into())?); - continue; - } - - return Err(Error::MissingBinary(command)); - } - Ok(paras) - } - - async fn latest_polkadot_release() -> Result { - let repo = GitHub::parse(POLKADOT)?; - match repo.get_latest_releases().await { - Ok(releases) => { - // Fetching latest releases - for release in releases { - if !release.prerelease && release.tag_name.starts_with("polkadot-v") { - return Ok(release - .tag_name - .strip_prefix("polkadot-") - .map_or_else(|| release.tag_name.clone(), |v| v.to_string())); - } - } - // It should never reach this point, but in case we download a default version of polkadot - Ok(POLKADOT_DEFAULT_VERSION.to_string()) - }, - // If an error with GitHub API return the POLKADOT DEFAULT VERSION - Err(_) => Ok(POLKADOT_DEFAULT_VERSION.to_string()), - } - } - - async fn latest_pop_release() -> Result { - let repo = GitHub::parse(POP)?; - match repo.get_latest_releases().await { - Ok(releases) => { - // Fetching latest releases - for release in releases { - return Ok(release - .tag_name - .strip_prefix("polkadot-") - .map_or_else(|| release.tag_name.clone(), |v| v.to_string())); - } - // It should never reach this point, but in case we download a default version of pop - Ok(POP_DEFAULT_VERSION.to_string()) - }, - // If an error with GitHub API return the default version - Err(_) => Ok(POP_DEFAULT_VERSION.to_string()), - } - } - - fn configure(&mut self) -> Result { - self.network_config.configure(&self.relay_chain.binary, &self.parachains) - } -} - -/// The network configuration. -struct NetworkConfiguration(DocumentMut); - -impl NetworkConfiguration { - fn from(path: impl AsRef) -> Result { - let contents = std::fs::read_to_string(&path)?; - let config = contents.parse::().map_err(|err| Error::TomlError(err.into()))?; - let network_config = NetworkConfiguration(config); - network_config.relay_chain()?; - Ok(network_config) - } - - fn relay_chain(&self) -> Result<&Table, Error> { - self.0 - .get("relaychain") - .and_then(|i| i.as_table()) - .ok_or(Error::Config("expected `relaychain`".into())) - } - - fn relay_chain_mut(&mut self) -> Result<&mut Table, Error> { - self.0 - .get_mut("relaychain") - .and_then(|i| i.as_table_mut()) - .ok_or(Error::Config("expected `relaychain`".into())) - } - - fn parachains(&self) -> Option<&ArrayOfTables> { - self.0.get("parachains").and_then(|p| p.as_array_of_tables()) - } - - fn parachains_mut(&mut self) -> Option<&mut ArrayOfTables> { - self.0.get_mut("parachains").and_then(|p| p.as_array_of_tables_mut()) - } - - fn command(config: &Table) -> Option<&Item> { - config.get("command") - } - - fn command_mut(config: &mut Table) -> Option<&mut Item> { - config.get_mut("command") - } - - fn default_command(config: &Table) -> Option<&Item> { - config.get("default_command") - } - - fn nodes(relay_chain: &Table) -> Option<&ArrayOfTables> { - relay_chain.get("nodes").and_then(|i| i.as_array_of_tables()) - } - - fn nodes_mut(relay_chain: &mut Table) -> Option<&mut ArrayOfTables> { - relay_chain.get_mut("nodes").and_then(|i| i.as_array_of_tables_mut()) - } - - // Adapts user provided config file to one that with resolved binary paths and which is compatible with current zombienet-sdk requirements - fn configure( - &mut self, - relay_chain: &Binary, - parachains: &IndexMap, - ) -> Result { - // Add zombienet-sdk specific settings 
if missing - let settings = self - .0 - .entry("settings") - .or_insert(Item::Table(Table::new())) - .as_table_mut() - .expect("settings created if missing"); - settings - .entry("timeout") - .or_insert(Item::Value(Value::Integer(Formatted::new(1_000)))); - settings - .entry("node_spawn_timeout") - .or_insert(Item::Value(Value::Integer(Formatted::new(300)))); - - // Update relay chain config - let relay_chain_config = self.relay_chain_mut()?; - let relay_path = Self::resolve_path(&relay_chain.path)?; - *relay_chain_config.entry("default_command").or_insert(value(&relay_path)) = - value(&relay_path); - if let Some(nodes) = Self::nodes_mut(relay_chain_config) { - for node in nodes.iter_mut() { - if let Some(command) = NetworkConfiguration::command_mut(node) { - *command = value(&relay_path) - } - } - } - - // Update parachain config - if let Some(tables) = self.parachains_mut() { - for table in tables.iter_mut() { - let id = table - .get("id") - .and_then(|i| i.as_integer()) - .ok_or(Error::Config("expected `parachain` to have `id`".into()))? as u32; - let para = - parachains.get(&id).expect("expected parachain existence due to preprocessing"); - - // Resolve default_command to binary - let path = Self::resolve_path(¶.binary.path)?; - table.insert("default_command", value(&path)); - - // Resolve individual collator command to binary - if let Some(collators) = - table.get_mut("collators").and_then(|p| p.as_array_of_tables_mut()) - { - for collator in collators.iter_mut() { - if let Some(command) = NetworkConfiguration::command_mut(collator) { - *command = value(&path) - } - } - } - } - } - - // Write adapted zombienet config to temp file - let network_config_file = Builder::new().suffix(".toml").tempfile()?; - let path = network_config_file - .path() - .to_str() - .ok_or(Error::Config("temp config file should have a path".into()))?; - write(path, self.0.to_string())?; - Ok(network_config_file) - } - - fn resolve_path(path: &Path) -> Result { - Ok(path - .canonicalize() - .map_err(|_| { - Error::Config(format!("the canonical path of {:?} could not be resolved", path)) - }) - .map(|p| p.to_str().map(|p| p.to_string()))? - .ok_or(Error::Config("the path is invalid".into()))?) - } -} - -/// The configuration required to launch the relay chain. -#[derive(Debug, PartialEq)] -struct RelayChain { - /// The binary used to launch a relay chain node. - binary: Binary, - /// The additional workers required by the relay chain node. - workers: [Binary; 2], -} - -impl RelayChain { - const BINARY: &'static str = "polkadot"; - const WORKERS: [&'static str; 2] = ["polkadot-execute-worker", "polkadot-prepare-worker"]; - fn new(version: impl Into, cache: &Path) -> Result { - let name = Self::BINARY.to_string(); - let version = version.into(); - let path = cache.join(format!("{name}-{version}")); - - let tag = format!("polkadot-{version}"); - let archive = format!("polkadot-{}.tar.gz", target()?); - let source = - Source::Archive { - url: format!("{POLKADOT}/releases/download/{tag}/{archive}"), - contents: once((name.clone(), path.clone())) - .chain(Self::WORKERS.iter().map(|worker| { - (worker.to_string(), cache.join(&format!("{worker}-{version}"))) - })) - .collect(), - }; - - // Add polkadot workers - let workers = Self::WORKERS.map(|worker| { - Binary::new( - worker, - &version, - cache.join(&format!("{worker}-{version}")), - Source::Artifact, - ) - }); - - Ok(RelayChain { binary: Binary { name, version, path, source }, workers }) - } -} - -/// The configuration required to launch a parachain. 
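// Illustrative sketch (not part of this patch): `NetworkConfiguration::configure`
// above rewrites the user's zombienet TOML with resolved binary paths via
// `toml_edit`, which preserves the remaining keys and formatting. A minimal
// standalone example of the same technique (the table/key names mirror the code
// above; the binary path is a placeholder):
use toml_edit::{value, DocumentMut};

fn set_relay_default_command(
    toml: &str,
    binary_path: &str,
) -> Result<String, toml_edit::TomlError> {
    let mut doc = toml.parse::<DocumentMut>()?;
    // Overwrite (or insert) the relay chain's default_command, leaving the rest
    // of the document untouched.
    doc["relaychain"]["default_command"] = value(binary_path);
    Ok(doc.to_string())
}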
-#[derive(Debug, PartialEq)] -struct Parachain { - /// The parachain identifier on the local network. - id: u32, - /// The binary used to launch a relay chain node. - binary: Binary, -} - -impl Parachain { - const SYSTEM_CHAIN_BINARY: &'static str = "polkadot-parachain"; - const POP_BINARY: &'static str = "pop-node"; - - fn from_git( - id: u32, - repo: Url, - reference: Option, - package: String, - cache: &Path, - ) -> Result { - // Currently just uses the unversioned package name - let path = cache.join(&package); - let source = Source::Git { - url: repo.clone(), - reference: reference.clone(), - package: package.clone(), - artifacts: vec![(package.clone(), path.clone())], - }; - Ok(Parachain { id, binary: Binary::new(package, String::default(), path, source) }) - } - - fn from_github_archive( - id: u32, - repo: GitHub, - reference: Option, - package: String, - cache: &Path, - ) -> Result { - // Currently just uses the unversioned package name - let path = cache.join(&package); - let url = match reference { - None => format!("https://api.github.com/repos/{}/{}/tarball", repo.org, repo.name), - Some(reference) => { - format!( - "https://github.com/{}/{}/archive/refs/heads/{reference}.tar.gz", - repo.org, repo.name - ) - }, - }; - let source = Source::SourceCodeArchive { - url, - package: package.clone(), - artifacts: vec![(package.clone(), path.clone())], - }; - Ok(Parachain { id, binary: Binary::new(package, String::default(), path, source) }) - } - - fn from_local(id: u32, path: PathBuf) -> Result { - let name = path - .file_name() - .and_then(|f| f.to_str()) - .ok_or(Error::Config(format!("unable to determine file name for {path:?}")))? - .to_string(); - - // Check if package manifest can be found within path - let mut manifest = path.parent(); - while let Some(path) = manifest { - if path.join("Cargo.toml").exists() { - break; - } - manifest = path.parent(); - } - - // Define source accordingly - let source = match manifest { - Some(manifest) => Source::LocalPackage { - manifest: manifest.join("Cargo.toml").to_path_buf(), - name: name.clone(), - }, - None => Source::Local, - }; - - Ok(Parachain { id, binary: Binary::new(name, String::default(), path, source) }) - } - - fn pop(id: u32, version: &str, cache: &Path) -> Result { - let name = Self::POP_BINARY; - let path = cache.join(format!("{name}-{version}")); - let archive = format!("{name}-{}.tar.gz", target()?); - let source = Source::Archive { - url: format!("{POP}/releases/download/{version}/{archive}"), - contents: vec![(name.to_string(), path.clone())], - }; - Ok(Parachain { id, binary: Binary::new(name, version, path, source) }) - } - - fn system_parachain(id: u32, version: &str, cache: &Path) -> Result { - let name = Self::SYSTEM_CHAIN_BINARY; - let path = cache.join(format!("{name}-{version}")); - let tag = format!("polkadot-{version}"); - let archive = format!("polkadot-parachain-{}.tar.gz", target()?); - let source = Source::Archive { - url: format!("{POLKADOT}/releases/download/{tag}/{archive}"), - contents: vec![(name.to_string(), path.clone())], - }; - Ok(Parachain { id, binary: Binary::new(name, version, path, source) }) - } -} - -/// A binary used to launch a node. -#[derive(Debug, Default, PartialEq)] -pub struct Binary { - /// The name of a binary. - pub name: String, - /// The version of the binary. - version: String, - /// The path to the binary, typically a versioned name within the cache. - path: PathBuf, - /// The source of the binary. 
- pub source: Source, -} - -impl Binary { - pub fn new( - name: impl Into, - version: impl Into, - path: impl Into, - source: Source, - ) -> Self { - Self { name: name.into(), version: version.into(), path: path.into(), source } - } - - /// Sources the binary by either downloading from a url or by cloning a git repository and - /// building locally from the resulting source code. - /// - /// # Arguments - /// - /// * `working_dir` - the working directory to be used - /// * `status` - used to observe status updates - /// * `verbose` - whether verbose output is required - pub async fn source( - &self, - working_dir: &Path, - status: impl Status, - verbose: bool, - ) -> Result<(), Error> { - // Ensure working directory exists - create_dir_all(working_dir)?; - // Download or clone and build from source - match &self.source { - Source::Archive { url, contents } => { - // Download archive - status.update(&format!("Downloading from {url}...")); - let response = reqwest::get(url.as_str()).await?.error_for_status()?; - let mut file = tempfile()?; - file.write_all(&response.bytes().await?)?; - file.seek(SeekFrom::Start(0))?; - // Extract contents - status.update("Extracting from archive..."); - let tar = GzDecoder::new(file); - let mut archive = Archive::new(tar); - let temp_dir = tempdir()?; - let working_dir = temp_dir.path(); - archive.unpack(working_dir)?; - for (name, dest) in contents { - rename(working_dir.join(name), dest)?; - } - status.update("Sourcing complete."); - }, - Source::Git { url, reference, package, artifacts } => { - // Clone repository into working directory - let repository_name = GitHub::name(url)?; - let working_dir = working_dir.join(repository_name); - status.update(&format!("Cloning {url}...")); - Git::clone(url, &working_dir, reference.as_deref())?; - // Build binaries - status.update("Starting build of binary..."); - self.build(&working_dir, package, &artifacts, status, verbose).await?; - }, - Source::LocalPackage { manifest, name } => { - // Build binaries - status.update("Starting build of binary..."); - self.build( - manifest.parent().expect("expected path to package manifest"), - name, - &[], - status, - verbose, - ) - .await?; - }, - Source::SourceCodeArchive { url, package, artifacts } => { - // Download archive (user agent required when using GitHub API) - status.update(&format!("Downloading from {url}...")); - let client = reqwest::ClientBuilder::new().user_agent(APP_USER_AGENT).build()?; - let response = client.get(url).send().await?.error_for_status()?; - let mut file = tempfile()?; - file.write_all(&response.bytes().await?)?; - file.seek(SeekFrom::Start(0))?; - // Extract contents - status.update("Extracting from archive..."); - let tar = GzDecoder::new(file); - let mut archive = Archive::new(tar); - let temp_dir = tempdir()?; - let mut working_dir = temp_dir.path().into(); - archive.unpack(&working_dir)?; - // Prepare archive contents for build - let entries: Vec<_> = - read_dir(&working_dir)?.take(2).filter_map(|x| x.ok()).collect(); - match entries.len() { - 0 => { - return Err(Error::ArchiveError( - "The downloaded archive does not contain any entries.".into(), - )) - }, - 1 => working_dir = entries[0].path(), // Automatically switch to top level directory - _ => {}, // Assume that downloaded archive does not have a top level directory - } - // Build binaries - status.update("Starting build of binary..."); - self.build(&working_dir, package, &artifacts, status, verbose).await?; - status.update("Sourcing complete."); - }, - Source::Url(url) => { - // 
Download required version of binaries - status.update(&format!("Downloading from {url}...")); - Self::download(&url, &self.path).await?; - }, - Source::None | Source::Artifact | Source::Local => {}, - } - Ok(()) - } - - async fn build( - &self, - working_dir: &Path, - package: &str, - artifacts: &[(String, PathBuf)], - status: impl Status, - verbose: bool, - ) -> Result<(), Error> { - // Build binaries and then copy to cache and target - let command = cmd("cargo", vec!["build", "--release", "-p", package]).dir(working_dir); - match verbose { - false => { - let reader = command.stderr_to_stdout().reader()?; - let mut output = std::io::BufReader::new(reader).lines(); - while let Some(line) = output.next() { - status.update(&line?); - } - }, - true => { - command.run()?; - }, - } - // Copy artifacts required - for (name, dest) in artifacts { - copy(working_dir.join(format!("target/release/{name}")), dest)?; - } - Ok(()) - } - - async fn download(url: &str, dest: &PathBuf) -> Result<(), Error> { - // Download to destination path - let response = reqwest::get(url).await?.error_for_status()?; - let mut file = File::create(&dest)?; - file.write_all(&response.bytes().await?)?; - // Make executable - let mut perms = metadata(dest)?.permissions(); - perms.set_mode(0o755); - std::fs::set_permissions(dest, perms)?; - Ok(()) - } - - pub fn version(&self) -> &str { - &self.version - } -} - -/// The source of a binary. -#[derive(Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] -pub enum Source { - /// No source could be determined. - #[default] - None, - /// An archive for download. - Archive { - /// The url of the archive. - url: String, - /// The contents within the archive which are required. - contents: Vec<(String, PathBuf)>, - }, - /// A build artifact. - Artifact, - /// A git repository. - Git { - /// The url of the repository. - url: Url, - /// If applicable, the branch, tag or commit. - reference: Option, - /// The name of the package to be built. - package: String, - /// Any additional artifacts which are required. - artifacts: Vec<(String, PathBuf)>, - }, - /// A local binary. - Local, - /// A local package. - LocalPackage { - /// The path to the package manifest. - manifest: PathBuf, - /// The name of the package to be built. - name: String, - }, - /// A source code archive for download. - SourceCodeArchive { - /// The url of the source code archive. - url: String, - /// The name of the package to be built. - package: String, - /// Any additional artifacts which are required. - artifacts: Vec<(String, PathBuf)>, - }, - /// A URL for download. - Url(String), -} - -/// A descriptor of a remote repository. -#[derive(Debug, PartialEq)] -struct Repository { - /// The (base) url of the repository. - url: Url, - /// If applicable, the branch or tag to be used. - reference: Option, - /// The name of a package within the repository. Defaults to the repository name. - package: String, -} - -impl Repository { - /// Parses a url in the form of https://github.com/org/repository?package#tag into its component parts. - fn parse(url: &str) -> Result { - let url = Url::parse(url)?; - let package = url.query(); - let reference = url.fragment().map(|f| f.to_string()); - - let mut url = url.clone(); - url.set_query(None); - url.set_fragment(None); - - let package = match package { - Some(b) => b, - None => GitHub::name(&url)?, - } - .to_string(); - - Ok(Self { url, reference, package }) - } -} - -/// Trait for observing status updates. 
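// Illustrative sketch (not part of this patch): `Status` below is the observer
// hook used while sourcing binaries; `()` provides the no-op implementation and
// the CLI supplies progress-bar reporters. Any other observer only needs
// `update`, e.g. a hypothetical logger (Copy/Clone derived to satisfy the bound
// on the trait as defined below):
#[derive(Copy, Clone)]
struct LogStatus;

impl Status for LogStatus {
    fn update(&self, status: &str) {
        println!("[sourcing] {status}");
    }
}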
-pub trait Status: Copy { - /// Update the observer with the provided `status`. - fn update(&self, status: &str); -} - -impl Status for () { - // no-op: status updates are ignored - fn update(&self, _: &str) {} -} - -fn target() -> Result<&'static str, Error> { - use std::env::consts::*; - - if OS == "windows" { - return Err(Error::UnsupportedPlatform { arch: ARCH, os: OS }); - } - - match ARCH { - "aarch64" => { - return match OS { - "macos" => Ok("aarch64-apple-darwin"), - _ => Ok("aarch64-unknown-linux-gnu"), - } - }, - "x86_64" | "x86" => { - return match OS { - "macos" => Ok("x86_64-apple-darwin"), - _ => Ok("x86_64-unknown-linux-gnu"), - } - }, - &_ => {}, - } - Err(Error::UnsupportedPlatform { arch: ARCH, os: OS }) -} - -#[cfg(test)] -mod tests { - use super::{super::Release, *}; - use anyhow::Result; - - const CONFIG_FILE_PATH: &str = "../../tests/networks/pop.toml"; - const TESTING_POLKADOT_VERSION: &str = "v1.12.0"; - const POLKADOT_BINARY: &str = "polkadot-v1.12.0"; - const POLKADOT_PARACHAIN_BINARY: &str = "polkadot-parachain-v1.12.0"; - - #[tokio::test] - async fn test_new_zombienet_success() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let zombienet = Zombienet::new( - cache.clone(), - CONFIG_FILE_PATH, - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&TESTING_POLKADOT_VERSION.to_string()), - None, - ) - .await?; - - // Check has the binary for Polkadot - let relay_chain = zombienet.relay_chain; - assert_eq!(relay_chain.binary.name, RelayChain::BINARY); - assert_eq!(relay_chain.binary.path, temp_dir.path().join(POLKADOT_BINARY)); - assert_eq!(relay_chain.binary.version, TESTING_POLKADOT_VERSION); - assert!(matches!(relay_chain.binary.source, Source::Archive { .. })); - - // Check has the binary for the System Chain - assert_eq!(zombienet.parachains.len(), 2); - - let system_chain = &zombienet.parachains[0]; - assert_eq!(system_chain.binary.name, Parachain::SYSTEM_CHAIN_BINARY); - assert_eq!(system_chain.binary.path, temp_dir.path().join(POLKADOT_PARACHAIN_BINARY)); - assert_eq!(system_chain.binary.version, TESTING_POLKADOT_VERSION); - assert!(matches!(system_chain.binary.source, Source::Archive { .. })); - - // Check has the binary for Pop - let parachain = &zombienet.parachains[1]; - let latest_release = latest_release(POP).await?.unwrap(); - assert_eq!(parachain.binary.name, "pop-node"); - assert_eq!( - parachain.binary.path, - temp_dir.path().join(format!("pop-node-{}", latest_release.tag_name)) - ); - assert_eq!(parachain.binary.version, latest_release.tag_name); - assert!(matches!(parachain.binary.source, Source::Archive { .. 
})); - - Ok(()) - } - - #[tokio::test] - async fn test_new_fails_wrong_config_no_para_id() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let toml_file = generate_wrong_config_no_para_id(&temp_dir) - .expect("Error generating the testing toml file"); - let toml_file_path = - toml_file.to_str().expect("Error generating the path of the testing toml file"); - - let result_error = Zombienet::new( - cache.clone(), - toml_file_path, - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&vec![POP.to_string()]), - ) - .await; - - assert!(result_error.is_err()); - let error_message = result_error.err().unwrap(); - assert_eq!( - error_message.to_string(), - "Configuration error: expected `parachain` to have `id`" - ); - - Ok(()) - } - - #[tokio::test] - async fn test_relay_chain() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let config = NetworkConfiguration::from(CONFIG_FILE_PATH)?; - - let relay_chain = - Zombienet::relay_chain(Some(&TESTING_POLKADOT_VERSION.to_string()), &config, &cache) - .await? - .binary; - - assert_eq!(relay_chain.name, RelayChain::BINARY); - assert_eq!(relay_chain.path, temp_dir.path().join(POLKADOT_BINARY)); - assert_eq!(relay_chain.version, TESTING_POLKADOT_VERSION); - assert!(matches!(relay_chain.source, Source::Archive { .. })); - - Ok(()) - } - - #[tokio::test] - async fn test_relay_chain_no_specifying_version() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let config = NetworkConfiguration::from(CONFIG_FILE_PATH)?; - - // Ideally here we will Mock GitHub struct and its get_latest_release function response - let relay_chain = Zombienet::relay_chain(None, &config, &cache).await?.binary; - - assert_eq!(relay_chain.name, RelayChain::BINARY); - assert!(relay_chain.version.starts_with("v")); - assert!(matches!(relay_chain.source, Source::Archive { .. })); - - Ok(()) - } - - #[tokio::test] - async fn test_relay_chain_fails_wrong_config() -> Result<()> { - let temp_dir = tempdir()?; - let path = generate_wrong_config_no_relay(&temp_dir)?; - assert!(matches!( - NetworkConfiguration::from(path), - Err(Error::Config(message)) if message == "expected `relaychain`")); - Ok(()) - } - - #[tokio::test] - async fn test_latest_polkadot_release() -> Result<()> { - let version = Zombienet::latest_polkadot_release().await?; - // Result will change all the time to the current version (e.g: v1.9.0), check at least starts with v - assert!(version.starts_with("v")); - Ok(()) - } - - #[tokio::test] - async fn test_system_parachain() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let system_chain = - Parachain::system_parachain(1000, &TESTING_POLKADOT_VERSION.to_string(), &cache)? - .binary; - - assert_eq!(system_chain.name, Parachain::SYSTEM_CHAIN_BINARY); - assert_eq!(system_chain.path, temp_dir.path().join(POLKADOT_PARACHAIN_BINARY)); - assert_eq!(system_chain.version, TESTING_POLKADOT_VERSION); - assert!(matches!(system_chain.source, Source::Archive { .. 
})); - - Ok(()) - } - - #[tokio::test] - async fn test_parachain() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let repo = Repository::parse(POP)?; - let parachain = - Parachain::from_git(2000, repo.url, repo.reference, repo.package, &cache)?.binary; - - assert_eq!(parachain.name, "pop-node"); - assert_eq!(parachain.path, temp_dir.path().join("pop-node")); - assert_eq!(parachain.version, ""); - assert!(matches!(parachain.source, Source::Git { .. })); - - Ok(()) - } - - #[tokio::test] - async fn test_missing_binaries() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let zombienet = Zombienet::new( - cache.clone(), - CONFIG_FILE_PATH, - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&vec![POP.to_string()]), - ) - .await?; - - let missing_binaries = zombienet.missing_binaries(); - assert_eq!(missing_binaries.len(), 3); - - Ok(()) - } - - #[tokio::test] - async fn test_missing_binaries_no_missing() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - // Create "fake" binary files - let relay_chain_file_path = temp_dir.path().join(POLKADOT_BINARY); - File::create(relay_chain_file_path)?; - let system_chain_file_path = temp_dir.path().join(POLKADOT_PARACHAIN_BINARY); - File::create(system_chain_file_path)?; - let latest_release = latest_release(POP).await?.unwrap(); - let pop_file_path = temp_dir.path().join(format!("pop-node-{}", latest_release.tag_name)); - File::create(pop_file_path)?; - - let zombienet = Zombienet::new( - cache.clone(), - CONFIG_FILE_PATH, - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&TESTING_POLKADOT_VERSION.to_string()), - None, - ) - .await?; - - let missing_binaries = zombienet.missing_binaries(); - assert_eq!(missing_binaries.len(), 0); - - Ok(()) - } - - #[tokio::test] - async fn test_configure_zombienet() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let mut zombienet = Zombienet::new( - cache.clone(), - CONFIG_FILE_PATH, - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&TESTING_POLKADOT_VERSION.to_string()), - None, - ) - .await?; - - File::create(cache.join(format!("{}-{TESTING_POLKADOT_VERSION}", RelayChain::BINARY)))?; - File::create( - cache.join(format!("{}-{TESTING_POLKADOT_VERSION}", Parachain::SYSTEM_CHAIN_BINARY)), - )?; - let latest_release = latest_release(POP).await?.unwrap(); - File::create(cache.join(format!("pop-node-{}", latest_release.tag_name)))?; - - zombienet.configure()?; - Ok(()) - } - - #[tokio::test] - async fn test_spawn_error_no_binaries() -> Result<()> { - let temp_dir = tempdir().expect("Could not create temp dir"); - let cache = PathBuf::from(temp_dir.path()); - - let mut zombienet = Zombienet::new( - cache.clone(), - CONFIG_FILE_PATH, - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&TESTING_POLKADOT_VERSION.to_string()), - Some(&vec![POP.to_string()]), - ) - .await?; - - let spawn = zombienet.spawn().await; - assert!(spawn.is_err()); - - Ok(()) - } - - #[tokio::test] - async fn test_source_url() -> Result<()> { - let temp_dir = tempdir()?; - let cache = PathBuf::from(temp_dir.path()); - - let binary = Binary::new("polkadot", TESTING_POLKADOT_VERSION, - cache.join(POLKADOT_BINARY), Source::Url( - 
"https://github.com/paritytech/polkadot-sdk/releases/download/polkadot-v1.12.0/polkadot" - .to_string(), - )); - let working_dir = tempdir()?; - binary.source(&working_dir.path(), (), false).await?; - assert!(temp_dir.path().join(POLKADOT_BINARY).exists()); - - Ok(()) - } - - fn generate_wrong_config_no_para_id(temp_dir: &tempfile::TempDir) -> Result { - let file_path = temp_dir.path().join("wrong_config_no_para_id.toml"); - let mut file = File::create(file_path.clone())?; - writeln!( - file, - r#" - [relaychain] - chain = "rococo-local" - - [[relaychain.nodes]] - name = "alice" - validator = true - - [[parachains]] - default_command = "pop-node" - - [[parachains.collators]] - name = "pop" - "# - )?; - Ok(file_path) - } - - fn generate_wrong_config_no_relay(temp_dir: &tempfile::TempDir) -> Result { - let file_path = temp_dir.path().join("wrong_config_no_para_id.toml"); - let mut file = File::create(file_path.clone())?; - writeln!( - file, - r#" - [[parachains]] - id = 1000 - chain = "asset-hub-rococo-local" - - [[parachains.collators]] - name = "asset-hub" - - [[parachains]] - id = 4385 - default_command = "pop-node" - - [[parachains.collators]] - name = "pop" - "# - )?; - Ok(file_path) - } - - async fn latest_release(url: &str) -> Result> { - let releases = GitHub::parse(url)?.get_latest_releases().await?; - Ok(releases.into_iter().nth(0)) - } - - #[test] - fn target_works() -> Result<()> { - use std::{process::Command, str}; - let output = Command::new("rustc").arg("-vV").output()?; - let output = str::from_utf8(&output.stdout)?; - let target = output - .lines() - .find(|l| l.starts_with("host: ")) - .map(|l| &l[6..]) - .unwrap() - .to_string(); - assert_eq!(super::target()?, target); - Ok(()) - } - - mod network_config { - use super::{Binary, Error, NetworkConfiguration, Parachain}; - use std::fs::create_dir_all; - use std::{ - fs::File, - io::{Read, Write}, - path::PathBuf, - }; - use tempfile::{tempdir, Builder}; - - #[test] - fn initialising_from_file_fails_when_missing() { - assert!(NetworkConfiguration::from(PathBuf::new()).is_err()); - } - - #[test] - fn initialising_from_file_fails_when_malformed() -> Result<(), Error> { - let config = Builder::new().suffix(".toml").tempfile()?; - writeln!(config.as_file(), "[")?; - assert!(matches!(NetworkConfiguration::from(config.path()), Err(Error::TomlError(..)))); - Ok(()) - } - - #[test] - fn initialising_from_file_fails_when_relaychain_missing() -> Result<(), Error> { - let config = Builder::new().suffix(".toml").tempfile()?; - assert!(matches!(NetworkConfiguration::from(config.path()), Err(Error::Config(..)))); - Ok(()) - } - - #[test] - fn initialises_relay_from_file() -> Result<(), Error> { - let config = Builder::new().suffix(".toml").tempfile()?; - writeln!( - config.as_file(), - r#" - [relaychain] - chain = "rococo-local" - default_command = "polkadot" - [[relaychain.nodes]] - name = "alice" - "# - )?; - let network_config = NetworkConfiguration::from(config.path())?; - let relay_chain = network_config.relay_chain()?; - assert_eq!("rococo-local", relay_chain["chain"].as_str().unwrap()); - assert_eq!( - "polkadot", - NetworkConfiguration::default_command(relay_chain).unwrap().as_str().unwrap() - ); - let nodes = NetworkConfiguration::nodes(relay_chain).unwrap(); - assert_eq!("alice", nodes.get(0).unwrap()["name"].as_str().unwrap()); - assert!(network_config.parachains().is_none()); - Ok(()) - } - - #[test] - fn initialises_parachains_from_file() -> Result<(), Error> { - let config = Builder::new().suffix(".toml").tempfile()?; - 
writeln!( - config.as_file(), - r#" - [relaychain] - chain = "rococo-local" - [[parachains]] - id = 2000 - default_command = "node" - "# - )?; - let network_config = NetworkConfiguration::from(config.path())?; - let parachains = network_config.parachains().unwrap(); - let para_2000 = parachains.get(0).unwrap(); - assert_eq!(2000, para_2000["id"].as_integer().unwrap()); - assert_eq!( - "node", - NetworkConfiguration::default_command(para_2000).unwrap().as_str().unwrap() - ); - Ok(()) - } - - #[test] - fn configure_works() -> Result<(), Error> { - let config = Builder::new().suffix(".toml").tempfile()?; - writeln!( - config.as_file(), - r#" -[relaychain] -chain = "rococo-local" - -[[relaychain.nodes]] -name = "alice" -command = "polkadot" - -[[parachains]] -id = 1000 -chain = "asset-hub-rococo-local" - -[[parachains.collators]] -name = "asset-hub" -command = "polkadot-parachain" - -[[parachains]] -id = 2000 -default_command = "pop-node" - -[[parachains.collators]] -name = "pop" -command = "pop-node" - -[[parachains]] -id = 2001 -default_command = "./target/release/parachain-template-node" - -[[parachains.collators]] -name = "collator" -command = "./target/release/parachain-template-node" -"# - )?; - let mut network_config = NetworkConfiguration::from(config.path())?; - - let relay_chain_binary = Builder::new().tempfile()?; - let relay_chain = relay_chain_binary.path(); - File::create(&relay_chain)?; - let system_chain_binary = Builder::new().tempfile()?; - let system_chain = system_chain_binary.path(); - File::create(&system_chain)?; - let pop_binary = Builder::new().tempfile()?; - let pop = pop_binary.path(); - File::create(&pop)?; - let parachain_template_node = Builder::new().tempfile()?; - let parachain_template = parachain_template_node.path(); - create_dir_all(parachain_template.parent().unwrap())?; - File::create(¶chain_template)?; - - let mut configured = network_config.configure( - &Binary { path: relay_chain.to_path_buf(), ..Default::default() }, - &[ - ( - 1000, - Parachain { - id: 1000, - binary: Binary { - path: system_chain.to_path_buf(), - ..Default::default() - }, - }, - ), - ( - 2000, - Parachain { - id: 2000, - binary: Binary { path: pop.to_path_buf(), ..Default::default() }, - }, - ), - ( - 2001, - Parachain { - id: 2001, - binary: Binary { - path: parachain_template.to_path_buf(), - ..Default::default() - }, - }, - ), - ] - .into(), - )?; - assert_eq!("toml", configured.path().extension().unwrap()); - - let mut contents = String::new(); - configured.read_to_string(&mut contents)?; - println!("{contents}"); - assert_eq!( - contents, - format!( - r#" -[relaychain] -chain = "rococo-local" -default_command = "{0}" - -[[relaychain.nodes]] -name = "alice" -command = "{0}" - -[[parachains]] -id = 1000 -chain = "asset-hub-rococo-local" -default_command = "{1}" - -[[parachains.collators]] -name = "asset-hub" -command = "{1}" - -[[parachains]] -id = 2000 -default_command = "{2}" - -[[parachains.collators]] -name = "pop" -command = "{2}" - -[[parachains]] -id = 2001 -default_command = "{3}" - -[[parachains.collators]] -name = "collator" -command = "{3}" - -[settings] -timeout = 1000 -node_spawn_timeout = 300 - -"#, - relay_chain.canonicalize()?.to_str().unwrap(), - system_chain.canonicalize()?.to_str().unwrap(), - pop.canonicalize()?.to_str().unwrap(), - parachain_template.canonicalize()?.to_str().unwrap() - ) - ); - Ok(()) - } - - #[test] - fn resolves_path() -> Result<(), Error> { - let working_dir = tempdir()?; - let path = working_dir.path().join("./target/release/node"); - 
assert!( - matches!(NetworkConfiguration::resolve_path(&path), Err(Error::Config(message)) - if message == format!("the canonical path of {:?} could not be resolved", path) - ) - ); - - create_dir_all(path.parent().unwrap())?; - File::create(&path)?; - assert_eq!( - NetworkConfiguration::resolve_path(&path)?, - path.canonicalize()?.to_str().unwrap().to_string() - ); - Ok(()) - } - } - - mod relay_chain { - use super::{ - target, Binary, Error, GitHub, RelayChain, Source, POLKADOT, POLKADOT_DEFAULT_VERSION, - }; - use tempfile::tempdir; - use url::Url; - - #[test] - fn initialises_for_download() -> Result<(), Error> { - let version = POLKADOT_DEFAULT_VERSION; - let cache = tempdir()?; - let binary = RelayChain::BINARY; - let repo = Url::parse(POLKADOT)?; - let source = Source::Archive { - url: format!( - "{}-{}.tar.gz", - GitHub::release(&repo, &format!("polkadot-{version}"), binary), - target()? - ), - contents: vec![ - ("polkadot".to_string(), cache.path().join(format!("polkadot-{version}"))), - ( - "polkadot-execute-worker".to_string(), - cache.path().join(format!("polkadot-execute-worker-{version}")), - ), - ( - "polkadot-prepare-worker".to_string(), - cache.path().join(format!("polkadot-prepare-worker-{version}")), - ), - ], - }; - let workers = RelayChain::WORKERS.map(|worker| { - Binary::new( - worker, - POLKADOT_DEFAULT_VERSION, - cache.path().join(format!("{worker}-{version}")), - Source::Artifact, - ) - }); - - assert_eq!( - RelayChain::new(version, cache.path())?, - RelayChain { - binary: Binary::new( - binary, - POLKADOT_DEFAULT_VERSION, - cache.path().join(format!("{binary}-{version}")), - source - ), - workers - } - ); - Ok(()) - } - } - - mod parachain { - use super::{ - target, Binary, Error, GitHub, Parachain, Repository, Source, POLKADOT, - POLKADOT_DEFAULT_VERSION, POP, - }; - use std::{fs::File, path::PathBuf}; - use tempfile::tempdir; - use url::Url; - - #[test] - fn initialises_from_git() -> Result<(), Error> { - let repo = Repository::parse(POP)?; - let cache = tempdir()?; - assert_eq!( - Parachain::from_git( - 2000, - repo.url.clone(), - repo.reference.clone(), - repo.package.clone(), - cache.path() - )?, - Parachain { - id: 2000, - binary: Binary { - name: "pop-node".into(), - version: String::default(), - path: cache.path().join("pop-node"), - source: Source::Git { - url: repo.url, - reference: repo.reference, - package: repo.package, - artifacts: vec![( - "pop-node".to_string(), - cache.path().join("pop-node") - )], - }, - } - } - ); - Ok(()) - } - - #[test] - fn initialises_from_local_binary() -> Result<(), Error> { - let working_dir = tempdir()?; - let command = PathBuf::from("/target/release/node"); - assert_eq!( - Parachain::from_local(2000, command.clone())?, - Parachain { - id: 2000, - binary: Binary { - name: "node".into(), - version: String::default(), - path: working_dir.path().join(&command), - source: Source::Local, - } - } - ); - Ok(()) - } - - #[test] - fn initialises_from_local_package() -> Result<(), Error> { - let working_dir = tempdir()?; - let command = working_dir.path().join("./target/release/node"); - let manifest = working_dir.path().join("Cargo.toml"); - File::create(&manifest)?; - assert_eq!( - Parachain::from_local(2000, command.clone())?, - Parachain { - id: 2000, - binary: Binary { - name: "node".into(), - version: String::default(), - path: working_dir.path().join(&command), - source: Source::LocalPackage { manifest, name: "node".to_string() }, - } - } - ); - Ok(()) - } - - #[test] - fn initialises_system_parachain_for_download() -> 
Result<(), Error> { - let version = POLKADOT_DEFAULT_VERSION; - let cache = tempdir()?; - let binary = Parachain::SYSTEM_CHAIN_BINARY; - let repo = Url::parse(POLKADOT)?; - assert_eq!( - Parachain::system_parachain(1000, version, cache.path())?, - Parachain { - id: 1000, - binary: Binary { - name: binary.into(), - version: version.into(), - path: cache.path().join(format!("{binary}-{version}")), - source: Source::Archive { - url: format!( - "{}-{}.tar.gz", - GitHub::release(&repo, &format!("polkadot-{version}"), binary), - target()? - ), - contents: vec![( - "polkadot-parachain".to_string(), - cache.path().join(format!("{binary}-{version}")) - )] - }, - } - } - ); - Ok(()) - } - } - - mod repository { - use super::{Error, Repository}; - use url::Url; - - #[test] - fn parsing_full_url_works() { - assert_eq!( - Repository::parse("https://github.com/org/repository?package#tag").unwrap(), - Repository { - url: Url::parse("https://github.com/org/repository").unwrap(), - reference: Some("tag".into()), - package: "package".into(), - } - ); - } - - #[test] - fn parsing_simple_url_works() { - let url = "https://github.com/org/repository"; - assert_eq!( - Repository::parse(url).unwrap(), - Repository { - url: Url::parse(url).unwrap(), - reference: None, - package: "repository".into(), - } - ); - } - - #[test] - fn parsing_invalid_url_returns_error() { - assert!(matches!( - Repository::parse("github.com/org/repository"), - Err(Error::ParseError(..)) - )); - } - } -} diff --git a/crates/pop-parachains/src/up/mod.rs b/crates/pop-parachains/src/up/mod.rs new file mode 100644 index 000000000..ef9e702a1 --- /dev/null +++ b/crates/pop-parachains/src/up/mod.rs @@ -0,0 +1,2099 @@ +// SPDX-License-Identifier: GPL-3.0 +use crate::{errors::Error, utils::git::GitHub}; +use glob::glob; +use indexmap::IndexMap; +use sourcing::{GitHub::*, Source, Source::*}; +use std::{ + fmt::Debug, + fs::write, + iter::once, + path::{Path, PathBuf}, +}; +use symlink::{remove_symlink_file, symlink_file}; +use tempfile::{Builder, NamedTempFile}; +use toml_edit::{value, ArrayOfTables, DocumentMut, Formatted, Item, Table, Value}; +use url::Url; +use zombienet_sdk::{Network, NetworkConfig, NetworkConfigExt}; +use zombienet_support::fs::local::LocalFileSystem; + +mod parachains; +mod relay; +mod sourcing; + +/// Configuration to launch a local network. +pub struct Zombienet { + /// The config to be used to launch a network. + network_config: NetworkConfiguration, + /// The configuration required to launch the relay chain. + relay_chain: RelayChain, + /// The configuration required to launch parachains. + parachains: IndexMap, +} + +impl Zombienet { + /// Initializes the configuration for launching a local network. + /// + /// # Arguments + /// * `cache` - The location used for caching binaries. + /// * `network_config` - The configuration file to be used to launch a network. + /// * `relay_chain_version` - The specific version used for the relay chain (`None` will use the latest available version). + /// * `system_parachain_version` - The specific version used for the system chain (`None` will use the latest available version). + /// * `parachains` - The parachain(s) specified. 
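+	/// # Example
+	/// A minimal sketch of constructing the launcher from a cached binary directory and a
+	/// network configuration file; the `./cache` and `./network.toml` paths below are
+	/// illustrative assumptions rather than defaults of this crate.
+	/// ```ignore
+	/// let cache = PathBuf::from("./cache");
+	/// let zombienet =
+	/// 	Zombienet::new(&cache, "./network.toml", Some("v1.12.0"), None, None).await?;
+	/// ```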
+ pub async fn new( + cache: &Path, + network_config: &str, + relay_chain_version: Option<&str>, + system_parachain_version: Option<&str>, + parachains: Option<&Vec>, + ) -> Result { + // Parse network config + let network_config = NetworkConfiguration::from(network_config)?; + // Determine relay and parachain requirements based on arguments and config + let relay_chain = Self::relay_chain(relay_chain_version, &network_config, cache).await?; + let parachains = match parachains { + Some(parachains) => Some( + parachains + .iter() + .map(|url| Repository::parse(url)) + .collect::, _>>()?, + ), + None => None, + }; + let parachains = Self::parachains( + &relay_chain, + system_parachain_version, + parachains, + &network_config, + cache, + ) + .await?; + Ok(Self { network_config, relay_chain, parachains }) + } + + /// The binaries required to launch the network. + pub fn binaries(&mut self) -> impl Iterator { + once::<&mut Binary>(&mut self.relay_chain.binary) + .chain(self.parachains.values_mut().map(|p| &mut p.binary)) + } + + /// Determine parachain configuration based on specified version and network configuration. + /// + /// # Arguments + /// * `relay_chain` - The configuration required to launch the relay chain. + /// * `system_parachain_version` - The specific version used for the system chain (`None` will use the latest available version). + /// * `parachains` - The parachain repositories specified. + /// * `network_config` - The network configuration to be used to launch a network. + /// * `cache` - The location used for caching binaries. + async fn parachains( + relay_chain: &RelayChain, + system_parachain_version: Option<&str>, + parachains: Option>, + network_config: &NetworkConfiguration, + cache: &Path, + ) -> Result, Error> { + let Some(tables) = network_config.parachains() else { + return Ok(IndexMap::default()); + }; + + let mut paras: IndexMap = IndexMap::new(); + 'outer: for table in tables { + let id = table + .get("id") + .and_then(|i| i.as_integer()) + .ok_or_else(|| Error::Config("expected `parachain` to have `id`".into()))? + as u32; + + let command = NetworkConfiguration::default_command(table) + .cloned() + .or_else(|| { + // Check if any collators define command + if let Some(collators) = + table.get("collators").and_then(|p| p.as_array_of_tables()) + { + for collator in collators.iter() { + if let Some(command) = + NetworkConfiguration::command(collator).and_then(|i| i.as_str()) + { + return Some(Item::Value(Value::String(Formatted::new( + command.into(), + )))); + } + } + } + + // Otherwise default to polkadot-parachain + Some(Item::Value(Value::String(Formatted::new("polkadot-parachain".into())))) + }) + .expect("missing default_command set above") + .as_str() + .expect("expected parachain command to be a string") + .to_lowercase(); + + // Check if system parachain + if let Some(parachain) = parachains::system( + id, + &command, + system_parachain_version, + &relay_chain.binary.version().expect("expected relay chain to have version"), + cache, + ) + .await? + { + paras.insert(id, parachain); + continue; + } + + // Check if known parachain + let version = parachains.as_ref().and_then(|r| { + r.iter() + .filter_map(|r| (r.package == command).then(|| r.reference.as_ref()).flatten()) + .nth(0) + .map(|v| v.as_str()) + }); + if let Some(parachain) = parachains::from(id, &command, version, cache).await? 
{ + paras.insert(id, parachain); + continue; + } + + // Check if parachain binary source specified as an argument + if let Some(parachains) = parachains.as_ref() { + for repo in parachains.iter().filter(|r| command == r.package) { + paras.insert(id, Parachain::from_repository(id, repo, cache)?); + continue 'outer; + } + } + + // Check if command references a local binary + if ["./", "../", "/"].iter().any(|p| command.starts_with(p)) { + paras.insert(id, Parachain::from_local(id, command.into())?); + continue; + } + + return Err(Error::MissingBinary(command)); + } + Ok(paras) + } + + /// Determines relay chain configuration based on specified version and network configuration. + /// + /// # Arguments + /// * `version` - The specific version used for the relay chain (`None` will use the latest available version). + /// * `network_config` - The network configuration to be used to launch a network. + /// * `cache` - The location used for caching binaries. + async fn relay_chain( + version: Option<&str>, + network_config: &NetworkConfiguration, + cache: &Path, + ) -> Result { + // Attempt to determine relay from default_command + let relay_chain = network_config.relay_chain()?; + if let Some(default_command) = + NetworkConfiguration::default_command(relay_chain).and_then(|c| c.as_str()) + { + let relay = relay::from(default_command, version, cache).await?; + // Validate any node config is supported + if let Some(nodes) = NetworkConfiguration::nodes(relay_chain) { + for node in nodes { + if let Some(command) = + NetworkConfiguration::command(node).and_then(|c| c.as_str()) + { + if command.to_lowercase() != relay.binary.name() { + return Err(Error::UnsupportedCommand(format!( + "the relay chain command is unsupported: {command}", + ))); + } + } + } + } + return Ok(relay); + } + // Attempt to determine from nodes + if let Some(nodes) = NetworkConfiguration::nodes(relay_chain) { + let mut relay: Option = None; + for node in nodes { + if let Some(command) = NetworkConfiguration::command(node).and_then(|c| c.as_str()) + { + match &relay { + Some(relay) => { + if command.to_lowercase() != relay.binary.name() { + return Err(Error::UnsupportedCommand(format!( + "the relay chain command is unsupported: {command}", + ))); + } + }, + None => { + relay = Some(relay::from(command, version, cache).await?); + }, + } + } + } + if let Some(relay) = relay { + return Ok(relay); + } + } + // Otherwise use default + return Ok(relay::default(version, cache).await?); + } + + /// Launches the local network. 
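+	/// # Example
+	/// A rough outline of sourcing any missing binaries before spawning, assuming a mutable
+	/// `zombienet` instance created via [`Zombienet::new`]; `()` is used as a no-op status
+	/// observer.
+	/// ```ignore
+	/// for binary in zombienet.binaries().filter(|b| !b.exists()) {
+	/// 	binary.source(true, &(), false).await?;
+	/// }
+	/// let network = zombienet.spawn().await?;
+	/// ```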
+ pub async fn spawn(&mut self) -> Result, Error> { + // Symlink polkadot workers + let relay_chain_binary_path = self.relay_chain.binary.path(); + if !relay_chain_binary_path.exists() { + return Err(Error::MissingBinary(self.relay_chain.binary.name().to_string())); + } + let cache = relay_chain_binary_path + .parent() + .expect("expected relay chain binary path to exist"); + let version = self.relay_chain.binary.version().ok_or_else(|| { + Error::MissingBinary(format!( + "Could not determine version for `{}` binary", + self.relay_chain.binary.name() + )) + })?; + for worker in &self.relay_chain.workers { + let dest = cache.join(worker); + if dest.exists() { + remove_symlink_file(&dest)?; + } + symlink_file(cache.join(format!("{worker}-{version}")), dest)?; + } + + // Load from config and spawn network + let config = self.network_config.configure(&self.relay_chain, &self.parachains)?; + let path = config.path().to_str().expect("temp config file should have a path").into(); + let network_config = NetworkConfig::load_from_toml(path)?; + Ok(network_config.spawn_native().await?) + } +} + +/// The network configuration. +struct NetworkConfiguration(DocumentMut); + +impl NetworkConfiguration { + /// Initializes the network configuration from the specified file. + /// + /// # Arguments + /// * `file` - The network configuration file. + fn from(file: impl AsRef) -> Result { + let contents = std::fs::read_to_string(&file)?; + let config = contents.parse::().map_err(|err| Error::TomlError(err.into()))?; + let network_config = NetworkConfiguration(config); + network_config.relay_chain()?; + Ok(network_config) + } + + /// Returns the `relaychain` configuration. + fn relay_chain(&self) -> Result<&Table, Error> { + self.0 + .get("relaychain") + .and_then(|i| i.as_table()) + .ok_or_else(|| Error::Config("expected `relaychain`".into())) + } + + /// Returns the `relaychain` configuration. + fn relay_chain_mut(&mut self) -> Result<&mut Table, Error> { + self.0 + .get_mut("relaychain") + .and_then(|i| i.as_table_mut()) + .ok_or_else(|| Error::Config("expected `relaychain`".into())) + } + + /// Returns the `parachains` configuration. + fn parachains(&self) -> Option<&ArrayOfTables> { + self.0.get("parachains").and_then(|p| p.as_array_of_tables()) + } + + /// Returns the `parachains` configuration. + fn parachains_mut(&mut self) -> Option<&mut ArrayOfTables> { + self.0.get_mut("parachains").and_then(|p| p.as_array_of_tables_mut()) + } + + /// Returns the `command` configuration. + fn command(config: &Table) -> Option<&Item> { + config.get("command") + } + + /// Returns the `command` configuration. + fn command_mut(config: &mut Table) -> Option<&mut Item> { + config.get_mut("command") + } + + /// Returns the `default_command` configuration. + fn default_command(config: &Table) -> Option<&Item> { + config.get("default_command") + } + + /// Returns the `nodes` configuration. + fn nodes(relay_chain: &Table) -> Option<&ArrayOfTables> { + relay_chain.get("nodes").and_then(|i| i.as_array_of_tables()) + } + + /// Returns the `nodes` configuration. + fn nodes_mut(relay_chain: &mut Table) -> Option<&mut ArrayOfTables> { + relay_chain.get_mut("nodes").and_then(|i| i.as_array_of_tables_mut()) + } + + /// Adapts user provided configuration file to one with resolved binary paths and which is + /// compatible with current zombienet-sdk requirements. + /// + /// # Arguments + /// * `relay_chain` - The configuration required to launch the relay chain. 
+ /// * `parachains` - The configuration required to launch the parachain(s). + fn configure( + &mut self, + relay_chain: &RelayChain, + parachains: &IndexMap, + ) -> Result { + // Add zombienet-sdk specific settings if missing + let settings = self + .0 + .entry("settings") + .or_insert(Item::Table(Table::new())) + .as_table_mut() + .expect("settings created if missing"); + settings + .entry("timeout") + .or_insert(Item::Value(Value::Integer(Formatted::new(1_000)))); + settings + .entry("node_spawn_timeout") + .or_insert(Item::Value(Value::Integer(Formatted::new(300)))); + + // Update relay chain config + let relay_chain_config = self.relay_chain_mut()?; + let relay_chain_binary_path = Self::resolve_path(&relay_chain.binary.path())?; + *relay_chain_config + .entry("default_command") + .or_insert(value(&relay_chain_binary_path)) = value(&relay_chain_binary_path); + if let Some(nodes) = Self::nodes_mut(relay_chain_config) { + for node in nodes.iter_mut() { + if let Some(command) = NetworkConfiguration::command_mut(node) { + *command = value(&relay_chain_binary_path) + } + } + } + + // Update parachain config + if let Some(tables) = self.parachains_mut() { + for table in tables.iter_mut() { + let id = table + .get("id") + .and_then(|i| i.as_integer()) + .ok_or_else(|| Error::Config("expected `parachain` to have `id`".into()))? + as u32; + let para = + parachains.get(&id).expect("expected parachain existence due to preprocessing"); + + // Resolve default_command to binary + let path = Self::resolve_path(¶.binary.path())?; + table.insert("default_command", value(&path)); + + // Resolve individual collator command to binary + if let Some(collators) = + table.get_mut("collators").and_then(|p| p.as_array_of_tables_mut()) + { + for collator in collators.iter_mut() { + if let Some(command) = NetworkConfiguration::command_mut(collator) { + *command = value(&path) + } + } + } + } + } + + // Write adapted zombienet config to temp file + let network_config_file = Builder::new().suffix(".toml").tempfile()?; + let path = network_config_file + .path() + .to_str() + .ok_or_else(|| Error::Config("temp config file should have a path".into()))?; + write(path, self.0.to_string())?; + Ok(network_config_file) + } + + /// Resolves the canonical path of a command specified within a network configuration file. + /// + /// # Arguments + /// * `path` - The path to be resolved. + fn resolve_path(path: &Path) -> Result { + Ok(path + .canonicalize() + .map_err(|_| { + Error::Config(format!("the canonical path of {:?} could not be resolved", path)) + }) + .map(|p| p.to_str().map(|p| p.to_string()))? + .ok_or_else(|| Error::Config("the path is invalid".into()))?) + } +} + +/// The configuration required to launch the relay chain. +struct RelayChain { + /// The binary used to launch a relay chain node. + binary: Binary, + /// The additional workers required by the relay chain node. + workers: [&'static str; 2], +} + +/// The configuration required to launch a parachain. +#[derive(Debug, PartialEq)] +struct Parachain { + /// The parachain identifier on the local network. + id: u32, + /// The binary used to launch a parachain node. + binary: Binary, +} + +impl Parachain { + /// Initializes the configuration required to launch a parachain using a local binary. + /// + /// # Arguments + /// * `id` - The parachain identifier on the local network. + /// * `path` - The path to the local binary. 
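+	/// # Example
+	/// An illustrative sketch using a hypothetical locally built node; the path is an
+	/// assumption for demonstration only.
+	/// ```ignore
+	/// let para = Parachain::from_local(2000, "./target/release/parachain-template-node".into())?;
+	/// assert_eq!(para.binary.name(), "parachain-template-node");
+	/// ```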
+ fn from_local(id: u32, path: PathBuf) -> Result { + let name = path + .file_name() + .and_then(|f| f.to_str()) + .ok_or_else(|| Error::Config(format!("unable to determine file name for {path:?}")))? + .to_string(); + // Check if package manifest can be found within path + let manifest = resolve_manifest(&name, &path)?; + Ok(Parachain { id, binary: Binary::Local { name, path, manifest } }) + } + + /// Initializes the configuration required to launch a parachain using a binary sourced from the specified repository. + /// + /// # Arguments + /// * `id` - The parachain identifier on the local network. + /// * `repo` - The repository to be used to source the binary. + /// * `cache` - The location used for caching binaries. + fn from_repository(id: u32, repo: &Repository, cache: &Path) -> Result { + // Check for GitHub repository to be able to download source as an archive + if repo.url.host_str().is_some_and(|h| h.to_lowercase() == "github.com") { + let github = GitHub::parse(repo.url.as_str())?; + let source = Source::GitHub(SourceCodeArchive { + owner: github.org, + repository: github.name, + reference: repo.reference.clone(), + manifest: None, + package: repo.package.clone(), + artifacts: vec![repo.package.clone()], + }); + Ok(Parachain { + id, + binary: Binary::Source { + name: repo.package.clone(), + source, + cache: cache.to_path_buf(), + }, + }) + } else { + Ok(Parachain { + id, + binary: Binary::Source { + name: repo.package.clone(), + source: Git { + url: repo.url.clone(), + reference: repo.reference.clone(), + manifest: None, + package: repo.package.clone(), + artifacts: vec![repo.package.clone()], + }, + cache: cache.to_path_buf(), + }, + }) + } + } +} + +/// A binary used to launch a node. +#[derive(Debug, PartialEq)] +pub enum Binary { + /// A local binary. + Local { + /// The name of the binary. + name: String, + /// The path of the binary. + path: PathBuf, + /// If applicable, the path to a manifest used to build the binary if missing. + manifest: Option, + }, + /// A binary which needs to be sourced. + Source { + /// The name of the binary. + name: String, + /// The source of the binary. + #[allow(private_interfaces)] + source: Source, + /// The cache to be used to store the binary. + cache: PathBuf, + }, +} + +impl Binary { + /// Whether the binary exists. + pub fn exists(&self) -> bool { + self.path().exists() + } + + /// If applicable, the latest version available. + pub fn latest(&self) -> Option<&str> { + match self { + Self::Local { .. } => None, + Self::Source { source, .. } => { + if let GitHub(ReleaseArchive { latest, .. }) = source { + latest.as_ref().map(|v| v.as_str()) + } else { + None + } + }, + } + } + + /// Whether the binary is defined locally. + pub fn local(&self) -> bool { + matches!(self, Self::Local { .. }) + } + + /// The name of the binary. + pub fn name(&self) -> &str { + match self { + Self::Local { name, .. } => name, + Self::Source { name, .. } => name, + } + } + + /// The path of the binary. + pub fn path(&self) -> PathBuf { + match self { + Self::Local { path, .. } => path.to_path_buf(), + Self::Source { name, source, cache, .. } => { + // Determine whether a specific version is specified + let version = match source { + Git { reference, .. } => reference.as_ref(), + GitHub(source) => match source { + ReleaseArchive { tag, .. } => tag.as_ref(), + SourceCodeArchive { reference, .. } => reference.as_ref(), + }, + Archive { .. } | Source::Url { .. 
} => None, + }; + version.map_or_else(|| cache.join(name), |v| cache.join(format!("{name}-{v}"))) + }, + } + } + + /// Attempts to resolve a version of a binary based on whether one is specified, an existing version + /// can be found cached locally, or uses the latest version. + /// + /// # Arguments + /// * `name` - The name of the binary. + /// * `specified` - If available, a version explicitly specified. + /// * `available` - The available versions, used to check for those cached locally or the latest otherwise. + /// * `cache` - The location used for caching binaries. + fn resolve_version( + name: &str, + specified: Option<&str>, + available: &[impl AsRef], + cache: &Path, + ) -> Option { + match specified { + Some(version) => Some(version.to_string()), + None => available + .iter() + .map(|v| v.as_ref()) + // Default to latest version available locally + .filter_map(|version| { + let path = cache.join(format!("{name}-{version}")); + path.exists().then_some(Some(version.to_string())) + }) + .nth(0) + .unwrap_or( + // Default to latest version + available.get(0).and_then(|version| Some(version.as_ref().to_string())), + ), + } + } + + /// Sources the binary. + /// + /// # Arguments + /// * `release` - Whether any binaries needing to be built should be done so using the release profile. + /// * `status` - Used to observe status updates. + /// * `verbose` - Whether verbose output is required. + pub async fn source( + &self, + release: bool, + status: &impl Status, + verbose: bool, + ) -> Result<(), Error> { + match self { + Self::Local { name, path, manifest, .. } => match manifest { + None => { + return Err(Error::MissingBinary(format!( + "The {path:?} binary cannot be sourced automatically." + ))) + }, + Some(manifest) => { + sourcing::from_local_package(manifest, name, release, status, verbose).await + }, + }, + Self::Source { source, cache, .. } => { + source.source(cache, release, status, verbose).await + }, + } + } + + /// Whether any locally cached version can be replaced with a newer version. + pub fn stale(&self) -> bool { + // Only binaries sourced from GitHub release archives can currently be determined as stale + let Self::Source { source: GitHub(ReleaseArchive { tag, latest, .. }), .. } = self else { + return false; + }; + latest.as_ref().map_or(false, |l| tag.as_ref() != Some(l)) + } + + /// Specifies that the latest available versions are to be used (where possible). + pub fn use_latest(&mut self) { + if let Self::Source { source: GitHub(ReleaseArchive { tag, latest, .. }), .. } = self { + if let Some(latest) = latest { + *tag = Some(latest.clone()) + } + }; + } + + /// If applicable, the version of the binary. + pub fn version(&self) -> Option<&str> { + match self { + Self::Local { .. } => None, + Self::Source { source, .. } => match source { + Git { reference, .. } => reference.as_ref(), + GitHub(source) => match source { + ReleaseArchive { tag, .. } => tag.as_ref(), + SourceCodeArchive { reference, .. } => reference.as_ref(), + }, + Archive { .. } | Source::Url { .. } => None, + }, + } + .map(|r| r.as_str()) + } +} + +/// A descriptor of a remote repository. +#[derive(Debug, PartialEq)] +struct Repository { + /// The url of the repository. + url: Url, + /// If applicable, the branch or tag to be used. + reference: Option, + /// The name of a package within the repository. Defaults to the repository name. + package: String, +} + +impl Repository { + /// Parses a url in the form of https://github.com/org/repository?package#tag into its component parts. 
+ /// + /// # Arguments + /// * `url` - The url to be parsed. + fn parse(url: &str) -> Result { + let url = Url::parse(url)?; + let package = url.query(); + let reference = url.fragment().map(|f| f.to_string()); + + let mut url = url.clone(); + url.set_query(None); + url.set_fragment(None); + + let package = match package { + Some(b) => b, + None => GitHub::name(&url)?, + } + .to_string(); + + Ok(Self { url, reference, package }) + } +} + +/// Trait for observing status updates. +pub trait Status { + /// Update the observer with the provided `status`. + fn update(&self, status: &str); +} + +impl Status for () { + // no-op: status updates are ignored + fn update(&self, _: &str) {} +} + +/// Attempts to resolve the package manifest from the specified path. +/// +/// # Arguments +/// * `package` - The name of the package. +/// * `path` - The path to start searching. +fn resolve_manifest(package: &str, path: &Path) -> Result, Error> { + let matches_package = |config: &DocumentMut| { + config + .get("package") + .and_then(|i| i.as_table()) + .and_then(|t| t.get("name")) + .and_then(|i| i.as_str()) + .map_or(false, |n| n == package) + }; + + let mut manifest = Some(path); + 'outer: while let Some(path) = manifest { + let manifest_path = path.join("Cargo.toml"); + if !manifest_path.exists() { + manifest = path.parent(); + continue; + } + let contents = std::fs::read_to_string(&manifest_path)?; + let config = contents.parse::().map_err(|err| Error::TomlError(err.into()))?; + // Check if package manifest + if matches_package(&config) { + break 'outer; + } + // Check if package defined as a workspace member + if let Some(members) = config + .get("workspace") + .and_then(|i| i.as_table()) + .and_then(|t| t.get("members")) + .and_then(|m| m.as_array()) + .map(|a| a.iter().filter_map(|v| v.as_str())) + { + // Check manifest of each member + for member in members { + let member_path = path.join(member); + for entry in glob(member_path.to_string_lossy().as_ref()) + .expect("expected valid glob for workspace member") + .filter_map(Result::ok) + { + let manifest_path = entry.join("Cargo.toml"); + if manifest_path.exists() { + let contents = std::fs::read_to_string(&manifest_path)?; + let config = contents + .parse::() + .map_err(|err| Error::TomlError(err.into()))?; + if matches_package(&config) { + break 'outer; + } + } + } + } + }; + manifest = path.parent(); + } + Ok(manifest.map(|p| p.join("Cargo.toml"))) +} + +/// Determines the target triple based on the current platform. 
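+/// # Example
+/// An illustrative sketch of the resolved triple, assuming an Apple Silicon (aarch64 macOS)
+/// host; other supported platforms resolve to their corresponding Linux or Darwin triples.
+/// ```ignore
+/// assert_eq!(target()?, "aarch64-apple-darwin");
+/// ```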
+fn target() -> Result<&'static str, Error> { + use std::env::consts::*; + + if OS == "windows" { + return Err(Error::UnsupportedPlatform { arch: ARCH, os: OS }); + } + + match ARCH { + "aarch64" => { + return match OS { + "macos" => Ok("aarch64-apple-darwin"), + _ => Ok("aarch64-unknown-linux-gnu"), + } + }, + "x86_64" | "x86" => { + return match OS { + "macos" => Ok("x86_64-apple-darwin"), + _ => Ok("x86_64-unknown-linux-gnu"), + } + }, + &_ => {}, + } + Err(Error::UnsupportedPlatform { arch: ARCH, os: OS }) +} + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::Result; + use std::env::current_dir; + use std::{fs::File, io::Write}; + use tempfile::tempdir; + + mod zombienet { + use super::*; + use sourcing::tests::Output; + + #[tokio::test] + async fn new_with_relay_only_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" +"# + )?; + let version = "v1.12.0"; + + let zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), Some(version), None, None) + .await?; + + let relay_chain = &zombienet.relay_chain.binary; + assert_eq!(relay_chain.name(), "polkadot"); + assert_eq!(relay_chain.path(), temp_dir.path().join(format!("polkadot-{version}"))); + assert_eq!(relay_chain.version().unwrap(), version); + assert!(matches!( + relay_chain, + Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. } + if *tag == Some(version.to_string()) + )); + assert!(zombienet.parachains.is_empty()); + Ok(()) + } + + #[tokio::test] + async fn new_with_default_command_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" +default_command = "./bin-v1.6.0/polkadot" +"# + )?; + let version = "v1.12.0"; + + let zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), Some(version), None, None) + .await?; + + let relay_chain = &zombienet.relay_chain.binary; + assert_eq!(relay_chain.name(), "polkadot"); + assert_eq!(relay_chain.path(), temp_dir.path().join(format!("polkadot-{version}"))); + assert_eq!(relay_chain.version().unwrap(), version); + assert!(matches!( + relay_chain, + Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. } + if *tag == Some(version.to_string()) + )); + assert!(zombienet.parachains.is_empty()); + Ok(()) + } + + #[tokio::test] + async fn new_with_node_command_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[relaychain.nodes]] +name = "alice" +validator = true +command = "polkadot" +"# + )?; + let version = "v1.12.0"; + + let zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), Some(version), None, None) + .await?; + + let relay_chain = &zombienet.relay_chain.binary; + assert_eq!(relay_chain.name(), "polkadot"); + assert_eq!(relay_chain.path(), temp_dir.path().join(format!("polkadot-{version}"))); + assert_eq!(relay_chain.version().unwrap(), version); + assert!(matches!( + relay_chain, + Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. 
} + if *tag == Some(version.to_string()) + )); + assert!(zombienet.parachains.is_empty()); + Ok(()) + } + + #[tokio::test] + async fn new_ensures_node_commands_valid() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[relaychain.nodes]] +name = "alice" +validator = true +command = "polkadot" + +[[relaychain.nodes]] +name = "bob" +validator = true +command = "polkadot-v1.12.0" +"# + )?; + + assert!(matches!( + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await, + Err(Error::UnsupportedCommand(error)) + if error == "the relay chain command is unsupported: polkadot-v1.12.0" + )); + Ok(()) + } + + #[tokio::test] + async fn new_ensures_node_command_valid() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" +default_command = "polkadot" + +[[relaychain.nodes]] +name = "alice" +validator = true +command = "polkadot-v1.12.0" +"# + )?; + + assert!(matches!( + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await, + Err(Error::UnsupportedCommand(error)) + if error == "the relay chain command is unsupported: polkadot-v1.12.0" + )); + Ok(()) + } + + #[tokio::test] + async fn new_with_system_chain_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 1000 +chain = "asset-hub-rococo-local" +"# + )?; + let system_parachain_version = "v1.12.0"; + + let zombienet = Zombienet::new( + &cache, + config.path().to_str().unwrap(), + Some("v1.11.0"), + Some(system_parachain_version), + None, + ) + .await?; + + assert_eq!(zombienet.parachains.len(), 1); + let system_parachain = &zombienet.parachains.get(&1000).unwrap().binary; + assert_eq!(system_parachain.name(), "polkadot-parachain"); + assert_eq!( + system_parachain.path(), + temp_dir.path().join(format!("polkadot-parachain-{system_parachain_version}")) + ); + assert_eq!(system_parachain.version().unwrap(), system_parachain_version); + assert!(matches!( + system_parachain, + Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. } + if *tag == Some(system_parachain_version.to_string()) + )); + Ok(()) + } + + #[tokio::test] + async fn new_with_pop_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 4385 +default_command = "pop-node" +"# + )?; + + let zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + + assert_eq!(zombienet.parachains.len(), 1); + let pop = &zombienet.parachains.get(&4385).unwrap().binary; + let version = pop.latest().unwrap(); + assert_eq!(pop.name(), "pop-node"); + assert_eq!(pop.path(), temp_dir.path().join(format!("pop-node-{version}"))); + assert_eq!(pop.version().unwrap(), version); + assert!(matches!( + pop, + Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. 
} + if *tag == Some(version.to_string()) + )); + Ok(()) + } + + #[tokio::test] + async fn new_with_pop_version_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 4385 +default_command = "pop-node" +"# + )?; + let version = "v1.0"; + + let zombienet = Zombienet::new( + &cache, + config.path().to_str().unwrap(), + None, + None, + Some(&vec![format!("https://github.com/r0gue-io/pop-node#{version}")]), + ) + .await?; + + assert_eq!(zombienet.parachains.len(), 1); + let pop = &zombienet.parachains.get(&4385).unwrap().binary; + assert_eq!(pop.name(), "pop-node"); + assert_eq!(pop.path(), temp_dir.path().join(format!("pop-node-{version}"))); + assert_eq!(pop.version().unwrap(), version); + assert!(matches!( + pop, + Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. } + if *tag == Some(version.to_string()) + )); + Ok(()) + } + + #[tokio::test] + async fn new_with_local_parachain_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 2000 +default_command = "./target/release/parachain-template-node" +"# + )?; + + let zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + + assert_eq!(zombienet.parachains.len(), 1); + let pop = &zombienet.parachains.get(&2000).unwrap().binary; + assert_eq!(pop.name(), "parachain-template-node"); + assert_eq!(pop.path(), Path::new("./target/release/parachain-template-node")); + assert_eq!(pop.version(), None); + assert!(matches!(pop, Binary::Local { .. })); + Ok(()) + } + + #[tokio::test] + async fn new_with_collator_command_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 2000 + +[[parachains.collators]] +name = "collator-01" +command = "./target/release/parachain-template-node" +"# + )?; + + let zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + + assert_eq!(zombienet.parachains.len(), 1); + let pop = &zombienet.parachains.get(&2000).unwrap().binary; + assert_eq!(pop.name(), "parachain-template-node"); + assert_eq!(pop.path(), Path::new("./target/release/parachain-template-node")); + assert_eq!(pop.version(), None); + assert!(matches!(pop, Binary::Local { .. 
})); + Ok(()) + } + + #[tokio::test] + async fn new_with_moonbeam_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 2000 +default_command = "moonbeam" +"# + )?; + let version = "v0.38.0"; + + let zombienet = Zombienet::new( + &cache, + config.path().to_str().unwrap(), + None, + None, + Some(&vec![format!("https://github.com/moonbeam-foundation/moonbeam#{version}")]), + ) + .await?; + + assert_eq!(zombienet.parachains.len(), 1); + let pop = &zombienet.parachains.get(&2000).unwrap().binary; + assert_eq!(pop.name(), "moonbeam"); + assert_eq!(pop.path(), temp_dir.path().join(format!("moonbeam-{version}"))); + assert_eq!(pop.version().unwrap(), version); + assert!(matches!( + pop, + Binary::Source { source: Source::GitHub(SourceCodeArchive { reference, .. }), .. } + if *reference == Some(version.to_string()) + )); + Ok(()) + } + + #[tokio::test] + async fn new_ensures_parachain_id_exists() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +"# + )?; + + assert!(matches!( + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await, + Err(Error::Config(error)) + if error == "expected `parachain` to have `id`" + )); + Ok(()) + } + + #[tokio::test] + async fn new_handles_missing_binary() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 404 +default_command = "missing-binary" +"# + )?; + + assert!(matches!( + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await, + Err(Error::MissingBinary(command)) + if command == "missing-binary" + )); + Ok(()) + } + + #[tokio::test] + async fn binaries_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[parachains]] +id = 1000 +chain = "asset-hub-rococo-local" + +[[parachains]] +id = 2000 +default_command = "./target/release/parachain-template-node" + +[[parachains]] +id = 4385 +default_command = "pop-node" +"# + )?; + + let mut zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + assert_eq!(zombienet.binaries().count(), 4); + Ok(()) + } + + #[tokio::test] + async fn spawn_ensures_relay_chain_binary_exists() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" +"# + )?; + + let mut zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + assert!(matches!( + zombienet.spawn().await, + Err(Error::MissingBinary(error)) + if error == "polkadot" + )); + Ok(()) + } + + #[tokio::test] + async fn spawn_ensures_relay_chain_version_set() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + 
config.as_file(), + r#" +[relaychain] +chain = "rococo-local" +"# + )?; + File::create(cache.join("polkadot"))?; + + let mut zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + if let Binary::Source { source: Source::GitHub(ReleaseArchive { tag, .. }), .. } = + &mut zombienet.relay_chain.binary + { + *tag = None + } + assert!(matches!( + zombienet.spawn().await, + Err(Error::MissingBinary(error)) + if error == "Could not determine version for `polkadot` binary", + )); + Ok(()) + } + + #[tokio::test] + async fn spawn_symlinks_workers() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" +"# + )?; + let version = "v1.12.0"; + File::create(cache.join(format!("polkadot-{version}")))?; + File::create(cache.join(format!("polkadot-execute-worker-{version}")))?; + File::create(cache.join(format!("polkadot-prepare-worker-{version}")))?; + + let mut zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + assert!(!cache.join("polkadot-execute-worker").exists()); + assert!(!cache.join("polkadot-prepare-worker").exists()); + let _ = zombienet.spawn().await; + assert!(cache.join("polkadot-execute-worker").exists()); + assert!(cache.join("polkadot-prepare-worker").exists()); + let _ = zombienet.spawn().await; + Ok(()) + } + + #[tokio::test] + async fn spawn_works() -> Result<()> { + let temp_dir = tempdir()?; + let cache = PathBuf::from(temp_dir.path()); + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[relaychain.nodes]] +name = "alice" +validator = true +"# + )?; + + let mut zombienet = + Zombienet::new(&cache, config.path().to_str().unwrap(), None, None, None).await?; + for b in zombienet.binaries() { + b.source(true, &Output, true).await?; + } + + zombienet.spawn().await?; + Ok(()) + } + } + + mod network_config { + use super::*; + use std::io::Read; + use std::{ + fs::{create_dir_all, File}, + io::Write, + path::PathBuf, + }; + use tempfile::{tempdir, Builder}; + + #[test] + fn initialising_from_file_fails_when_missing() { + assert!(NetworkConfiguration::from(PathBuf::new()).is_err()); + } + + #[test] + fn initialising_from_file_fails_when_malformed() -> Result<(), Error> { + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!(config.as_file(), "[")?; + assert!(matches!(NetworkConfiguration::from(config.path()), Err(Error::TomlError(..)))); + Ok(()) + } + + #[test] + fn initialising_from_file_fails_when_relaychain_missing() -> Result<(), Error> { + let config = Builder::new().suffix(".toml").tempfile()?; + assert!(matches!(NetworkConfiguration::from(config.path()), Err(Error::Config(..)))); + Ok(()) + } + + #[test] + fn initializes_relay_from_file() -> Result<(), Error> { + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" + [relaychain] + chain = "rococo-local" + default_command = "polkadot" + [[relaychain.nodes]] + name = "alice" + "# + )?; + let network_config = NetworkConfiguration::from(config.path())?; + let relay_chain = network_config.relay_chain()?; + assert_eq!("rococo-local", relay_chain["chain"].as_str().unwrap()); + assert_eq!( + "polkadot", + NetworkConfiguration::default_command(relay_chain).unwrap().as_str().unwrap() + ); + let nodes = 
NetworkConfiguration::nodes(relay_chain).unwrap(); + assert_eq!("alice", nodes.get(0).unwrap()["name"].as_str().unwrap()); + assert!(network_config.parachains().is_none()); + Ok(()) + } + + #[test] + fn initializes_parachains_from_file() -> Result<(), Error> { + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" + [relaychain] + chain = "rococo-local" + [[parachains]] + id = 2000 + default_command = "node" + "# + )?; + let network_config = NetworkConfiguration::from(config.path())?; + let parachains = network_config.parachains().unwrap(); + let para_2000 = parachains.get(0).unwrap(); + assert_eq!(2000, para_2000["id"].as_integer().unwrap()); + assert_eq!( + "node", + NetworkConfiguration::default_command(para_2000).unwrap().as_str().unwrap() + ); + Ok(()) + } + + #[test] + fn configure_works() -> Result<(), Error> { + let config = Builder::new().suffix(".toml").tempfile()?; + writeln!( + config.as_file(), + r#" +[relaychain] +chain = "rococo-local" + +[[relaychain.nodes]] +name = "alice" +command = "polkadot" + +[[parachains]] +id = 1000 +chain = "asset-hub-rococo-local" + +[[parachains.collators]] +name = "asset-hub" +command = "polkadot-parachain" + +[[parachains]] +id = 2000 +default_command = "pop-node" + +[[parachains.collators]] +name = "pop" +command = "pop-node" + +[[parachains]] +id = 2001 +default_command = "./target/release/parachain-template-node" + +[[parachains.collators]] +name = "collator" +command = "./target/release/parachain-template-node" +"# + )?; + let mut network_config = NetworkConfiguration::from(config.path())?; + + let relay_chain_binary = Builder::new().tempfile()?; + let relay_chain = relay_chain_binary.path(); + File::create(&relay_chain)?; + let system_chain_binary = Builder::new().tempfile()?; + let system_chain = system_chain_binary.path(); + File::create(&system_chain)?; + let pop_binary = Builder::new().tempfile()?; + let pop = pop_binary.path(); + File::create(&pop)?; + let parachain_template_node = Builder::new().tempfile()?; + let parachain_template = parachain_template_node.path(); + create_dir_all(parachain_template.parent().unwrap())?; + File::create(¶chain_template)?; + + let mut configured = network_config.configure( + &RelayChain { + binary: Binary::Local { + name: "polkadot".to_string(), + path: relay_chain.to_path_buf(), + manifest: None, + }, + workers: ["polkadot-execute-worker", ""], + }, + &[ + ( + 1000, + Parachain { + id: 1000, + binary: Binary::Local { + name: "polkadot-parachain".to_string(), + path: system_chain.to_path_buf(), + manifest: None, + }, + }, + ), + ( + 2000, + Parachain { + id: 2000, + binary: Binary::Local { + name: "pop-node".to_string(), + path: pop.to_path_buf(), + manifest: None, + }, + }, + ), + ( + 2001, + Parachain { + id: 2001, + binary: Binary::Local { + name: "parachain-template-node".to_string(), + path: parachain_template.to_path_buf(), + manifest: None, + }, + }, + ), + ] + .into(), + )?; + assert_eq!("toml", configured.path().extension().unwrap()); + + let mut contents = String::new(); + configured.read_to_string(&mut contents)?; + println!("{contents}"); + assert_eq!( + contents, + format!( + r#" +[relaychain] +chain = "rococo-local" +default_command = "{0}" + +[[relaychain.nodes]] +name = "alice" +command = "{0}" + +[[parachains]] +id = 1000 +chain = "asset-hub-rococo-local" +default_command = "{1}" + +[[parachains.collators]] +name = "asset-hub" +command = "{1}" + +[[parachains]] +id = 2000 +default_command = "{2}" + +[[parachains.collators]] +name = "pop" 
+command = "{2}" + +[[parachains]] +id = 2001 +default_command = "{3}" + +[[parachains.collators]] +name = "collator" +command = "{3}" + +[settings] +timeout = 1000 +node_spawn_timeout = 300 + +"#, + relay_chain.canonicalize()?.to_str().unwrap(), + system_chain.canonicalize()?.to_str().unwrap(), + pop.canonicalize()?.to_str().unwrap(), + parachain_template.canonicalize()?.to_str().unwrap() + ) + ); + Ok(()) + } + + #[test] + fn resolves_path() -> Result<(), Error> { + let working_dir = tempdir()?; + let path = working_dir.path().join("./target/release/node"); + assert!( + matches!(NetworkConfiguration::resolve_path(&path), Err(Error::Config(message)) + if message == format!("the canonical path of {:?} could not be resolved", path) + ) + ); + + create_dir_all(path.parent().unwrap())?; + File::create(&path)?; + assert_eq!( + NetworkConfiguration::resolve_path(&path)?, + path.canonicalize()?.to_str().unwrap().to_string() + ); + Ok(()) + } + } + + mod parachain { + use super::*; + use crate::up::sourcing::GitHub::SourceCodeArchive; + use std::path::PathBuf; + + #[test] + fn initializes_from_local_binary() -> Result<(), Error> { + let name = "parachain-template-node"; + let command = PathBuf::from("./target/release").join(&name); + assert_eq!( + Parachain::from_local(2000, command.clone())?, + Parachain { + id: 2000, + binary: Binary::Local { name: name.to_string(), path: command, manifest: None } + } + ); + Ok(()) + } + + #[test] + fn initializes_from_local_package() -> Result<(), Error> { + let name = "pop-parachains"; + let command = PathBuf::from("./target/release").join(&name); + assert_eq!( + Parachain::from_local(2000, command.clone())?, + Parachain { + id: 2000, + binary: Binary::Local { + name: name.to_string(), + path: command, + manifest: Some(PathBuf::from("./Cargo.toml")) + } + } + ); + Ok(()) + } + + #[test] + fn initializes_from_git() -> Result<(), Error> { + let repo = Repository::parse("https://git.com/r0gue-io/pop-node#v1.0")?; + let cache = tempdir()?; + assert_eq!( + Parachain::from_repository(2000, &repo, cache.path())?, + Parachain { + id: 2000, + binary: Binary::Source { + name: "pop-node".to_string(), + source: Source::Git { + url: repo.url, + reference: repo.reference, + manifest: None, + package: "pop-node".to_string(), + artifacts: vec!["pop-node".to_string()], + }, + cache: cache.path().to_path_buf(), + } + } + ); + Ok(()) + } + + #[test] + fn initializes_from_github() -> Result<(), Error> { + let repo = Repository::parse("https://github.com/r0gue-io/pop-node#v1.0")?; + let cache = tempdir()?; + assert_eq!( + Parachain::from_repository(2000, &repo, cache.path())?, + Parachain { + id: 2000, + binary: Binary::Source { + name: "pop-node".to_string(), + source: Source::GitHub(SourceCodeArchive { + owner: "r0gue-io".to_string(), + repository: "pop-node".to_string(), + reference: Some("v1.0".to_string()), + manifest: None, + package: "pop-node".to_string(), + artifacts: vec!["pop-node".to_string()], + }), + cache: cache.path().to_path_buf(), + } + } + ); + Ok(()) + } + } + + mod binary { + use super::*; + use duct::cmd; + use sourcing::tests::Output; + use std::fs::create_dir_all; + + #[test] + fn local_binary_works() -> Result<()> { + let name = "polkadot"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(name); + File::create(&path)?; + + let binary = + Binary::Local { name: name.to_string(), path: path.clone(), manifest: None }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), None); + assert!(binary.local()); + assert_eq!(binary.name(), name); 
+ assert_eq!(binary.path(), path); + assert!(!binary.stale()); + assert_eq!(binary.version(), None); + Ok(()) + } + + #[test] + fn local_package_works() -> Result<()> { + let name = "polkadot"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join("target/release").join(name); + create_dir_all(&path.parent().unwrap())?; + File::create(&path)?; + let manifest = Some(temp_dir.path().join("Cargo.toml")); + + let binary = Binary::Local { name: name.to_string(), path: path.clone(), manifest }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), None); + assert!(binary.local()); + assert_eq!(binary.name(), name); + assert_eq!(binary.path(), path); + assert!(!binary.stale()); + assert_eq!(binary.version(), None); + Ok(()) + } + + #[test] + fn resolve_version_works() -> Result<()> { + let name = "polkadot"; + let temp_dir = tempdir()?; + + let available = vec!["v1.13.0", "v1.12.0", "v1.11.0"]; + + // Specified + let specified = Some("v1.12.0"); + assert_eq!( + Binary::resolve_version(name, specified, &available, temp_dir.path()).unwrap(), + specified.unwrap() + ); + // Latest + assert_eq!( + Binary::resolve_version(name, None, &available, temp_dir.path()).unwrap(), + available[0] + ); + // Cached + File::create(temp_dir.path().join(format!("{name}-{}", available[1])))?; + assert_eq!( + Binary::resolve_version(name, None, &available, temp_dir.path()).unwrap(), + available[1] + ); + Ok(()) + } + + #[test] + fn sourced_from_archive_works() -> Result<()> { + let name = "polkadot"; + let url = "https://github.com/r0gue-io/polkadot/releases/latest/download/polkadot-aarch64-apple-darwin.tar.gz".to_string(); + let contents = vec![ + name.to_string(), + "polkadot-execute-worker".into(), + "polkadot-prepare-worker".into(), + ]; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(name); + File::create(&path)?; + + let mut binary = Binary::Source { + name: name.to_string(), + source: Archive { url: url.to_string(), contents }, + cache: temp_dir.path().to_path_buf(), + }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), None); + assert!(!binary.local()); + assert_eq!(binary.name(), name); + assert_eq!(binary.path(), path); + assert!(!binary.stale()); + assert_eq!(binary.version(), None); + binary.use_latest(); + assert_eq!(binary.version(), None); + Ok(()) + } + + #[test] + fn sourced_from_git_works() -> Result<()> { + let package = "hello_world"; + let url = Url::parse("https://github.com/hpaluch/rust-hello-world")?; + let temp_dir = tempdir()?; + for reference in [None, Some("436b7dbffdfaaf7ad90bf44ae8fdcb17eeee65a3".to_string())] { + let path = temp_dir.path().join( + reference + .as_ref() + .map_or(package.into(), |reference| format!("{package}-{reference}")), + ); + File::create(&path)?; + + let mut binary = Binary::Source { + name: package.to_string(), + source: Git { + url: url.clone(), + reference: reference.clone(), + manifest: None, + package: package.to_string(), + artifacts: vec![package.to_string()], + }, + cache: temp_dir.path().to_path_buf(), + }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), None); + assert!(!binary.local()); + assert_eq!(binary.name(), package); + assert_eq!(binary.path(), path); + assert!(!binary.stale()); + assert_eq!(binary.version(), reference.as_ref().map(|r| r.as_str())); + binary.use_latest(); + assert_eq!(binary.version(), reference.as_ref().map(|r| r.as_str())); + } + + Ok(()) + } + + #[test] + fn sourced_from_github_release_archive_works() -> Result<()> { + let owner = "r0gue-io"; + let repository = "polkadot"; + 
let tag_format = "polkadot-{tag}"; + let name = "polkadot"; + let archive = format!("{name}-{}.tar.gz", target()?); + let contents = ["polkadot", "polkadot-execute-worker", "polkadot-prepare-worker"]; + let temp_dir = tempdir()?; + for tag in [None, Some("v1.12.0".to_string())] { + let path = temp_dir + .path() + .join(tag.as_ref().map_or(name.to_string(), |t| format!("{name}-{t}"))); + File::create(&path)?; + for latest in [None, Some("v2.0.0".to_string())] { + let mut binary = Binary::Source { + name: name.to_string(), + source: GitHub(ReleaseArchive { + owner: owner.into(), + repository: repository.into(), + tag: tag.clone(), + tag_format: Some(tag_format.to_string()), + archive: archive.clone(), + contents: contents.to_vec(), + latest: latest.clone(), + }), + cache: temp_dir.path().to_path_buf(), + }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), latest.as_ref().map(|l| l.as_str())); + assert!(!binary.local()); + assert_eq!(binary.name(), name); + assert_eq!(binary.path(), path); + assert_eq!(binary.stale(), latest.is_some()); + assert_eq!(binary.version(), tag.as_ref().map(|t| t.as_str())); + binary.use_latest(); + if latest.is_some() { + assert_eq!(binary.version(), latest.as_ref().map(|l| l.as_str())); + } + } + } + Ok(()) + } + + #[test] + fn sourced_from_github_source_code_archive_works() -> Result<()> { + let owner = "paritytech"; + let repository = "polkadot-sdk"; + let package = "polkadot"; + let manifest = "substrate/Cargo.toml"; + let temp_dir = tempdir()?; + for reference in [None, Some("72dba98250a6267c61772cd55f8caf193141050f".to_string())] { + let path = temp_dir.path().join( + reference.as_ref().map_or(package.to_string(), |t| format!("{package}-{t}")), + ); + File::create(&path)?; + let mut binary = Binary::Source { + name: package.to_string(), + source: GitHub(SourceCodeArchive { + owner: owner.to_string(), + repository: repository.to_string(), + reference: reference.clone(), + manifest: Some(PathBuf::from(manifest)), + package: package.to_string(), + artifacts: vec![package.to_string()], + }), + cache: temp_dir.path().to_path_buf(), + }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), None); + assert!(!binary.local()); + assert_eq!(binary.name(), package); + assert_eq!(binary.path(), path); + assert_eq!(binary.stale(), false); + assert_eq!(binary.version(), reference.as_ref().map(|r| r.as_str())); + binary.use_latest(); + assert_eq!(binary.version(), reference.as_ref().map(|l| l.as_str())); + } + Ok(()) + } + + #[test] + fn sourced_from_url_works() -> Result<()> { + let name = "polkadot"; + let url = + "https://github.com/paritytech/polkadot-sdk/releases/latest/download/polkadot.asc"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(name); + File::create(&path)?; + + let mut binary = Binary::Source { + name: name.to_string(), + source: Source::Url { url: url.to_string(), name: name.to_string() }, + cache: temp_dir.path().to_path_buf(), + }; + + assert!(binary.exists()); + assert_eq!(binary.latest(), None); + assert!(!binary.local()); + assert_eq!(binary.name(), name); + assert_eq!(binary.path(), path); + assert!(!binary.stale()); + assert_eq!(binary.version(), None); + binary.use_latest(); + assert_eq!(binary.version(), None); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_local_binary_not_supported() -> Result<()> { + let name = "polkadot".to_string(); + let temp_dir = tempdir()?; + let path = temp_dir.path().join(&name); + assert!(matches!( + Binary::Local { name, path: path.clone(), manifest: None }.source(true, 
&Output, true).await, + Err(Error::MissingBinary(error)) if error == format!("The {path:?} binary cannot be sourced automatically.") + )); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_local_package_works() -> Result<()> { + let temp_dir = tempdir()?; + let name = "hello_world"; + cmd("cargo", ["new", name, "--bin"]).dir(temp_dir.path()).run()?; + let path = temp_dir.path().join(name); + let manifest = Some(path.join("Cargo.toml")); + let path = path.join("target/release").join(name); + Binary::Local { name: name.to_string(), path: path.clone(), manifest } + .source(true, &Output, true) + .await?; + assert!(path.exists()); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_url_works() -> Result<()> { + let name = "polkadot"; + let url = + "https://github.com/paritytech/polkadot-sdk/releases/latest/download/polkadot.asc"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(name); + + Binary::Source { + name: name.to_string(), + source: Source::Url { url: url.to_string(), name: name.to_string() }, + cache: temp_dir.path().to_path_buf(), + } + .source(true, &Output, true) + .await?; + assert!(path.exists()); + Ok(()) + } + } + + mod repository { + use super::{Error, Repository}; + use url::Url; + + #[test] + fn parsing_full_url_works() { + assert_eq!( + Repository::parse("https://github.com/org/repository?package#tag").unwrap(), + Repository { + url: Url::parse("https://github.com/org/repository").unwrap(), + reference: Some("tag".into()), + package: "package".into(), + } + ); + } + + #[test] + fn parsing_simple_url_works() { + let url = "https://github.com/org/repository"; + assert_eq!( + Repository::parse(url).unwrap(), + Repository { + url: Url::parse(url).unwrap(), + reference: None, + package: "repository".into(), + } + ); + } + + #[test] + fn parsing_invalid_url_returns_error() { + assert!(matches!( + Repository::parse("github.com/org/repository"), + Err(Error::ParseError(..)) + )); + } + } + + #[test] + fn resolve_manifest_works() -> Result<()> { + let current_dir = current_dir()?; + // Crate + assert_eq!( + current_dir.join("Cargo.toml"), + resolve_manifest("pop-parachains", ¤t_dir)?.unwrap() + ); + // Workspace + assert_eq!( + current_dir.join("../../Cargo.toml").canonicalize()?, + resolve_manifest("pop-cli", ¤t_dir)?.unwrap() + ); + Ok(()) + } + + #[test] + fn target_works() -> Result<()> { + use std::{process::Command, str}; + let output = Command::new("rustc").arg("-vV").output()?; + let output = str::from_utf8(&output.stdout)?; + let target = output + .lines() + .find(|l| l.starts_with("host: ")) + .map(|l| &l[6..]) + .unwrap() + .to_string(); + assert_eq!(super::target()?, target); + Ok(()) + } +} diff --git a/crates/pop-parachains/src/up/parachains.rs b/crates/pop-parachains/src/up/parachains.rs new file mode 100644 index 000000000..eac7da9e8 --- /dev/null +++ b/crates/pop-parachains/src/up/parachains.rs @@ -0,0 +1,212 @@ +// SPDX-License-Identifier: GPL-3.0 +use super::{ + sourcing, + sourcing::{ + traits::{Source as _, *}, + GitHub::ReleaseArchive, + Source, + }, + target, Binary, Error, +}; +use std::path::Path; +use strum::VariantArray as _; +use strum_macros::{EnumProperty, VariantArray}; + +/// A supported parachain. +#[derive(Debug, EnumProperty, PartialEq, VariantArray)] +pub(super) enum Parachain { + /// Parachain containing core Polkadot protocol features. 
+	#[strum(props(
+		Repository = "https://github.com/r0gue-io/polkadot",
+		Binary = "polkadot-parachain",
+		TagFormat = "polkadot-{tag}",
+		Fallback = "v1.12.0"
+	))]
+	System,
+	/// Pop Network makes it easy for smart contract developers to use the power of Polkadot.
+	#[strum(props(
+		Repository = "https://github.com/r0gue-io/pop-node",
+		Binary = "pop-node",
+		Prerelease = "true",
+		Fallback = "v0.1.0-alpha2"
+	))]
+	Pop,
+}
+
+impl TryInto for Parachain {
+	/// Attempt the conversion.
+	///
+	/// # Arguments
+	/// * `tag` - If applicable, a tag used to determine a specific release.
+	/// * `latest` - If applicable, some specifier used to determine the latest source.
+	fn try_into(&self, tag: Option<String>, latest: Option<String>) -> Result<Source, Error> {
+		Ok(match self {
+			Parachain::System | Parachain::Pop => {
+				// Source from GitHub release asset
+				let repo = crate::GitHub::parse(self.repository())?;
+				Source::GitHub(ReleaseArchive {
+					owner: repo.org,
+					repository: repo.name,
+					tag,
+					tag_format: self.tag_format().map(|t| t.into()),
+					archive: format!("{}-{}.tar.gz", self.binary(), target()?),
+					contents: vec![self.binary()],
+					latest,
+				})
+			},
+		})
+	}
+}
+
+impl sourcing::traits::Source for Parachain {}
+
+/// Initialises the configuration required to launch a system parachain.
+///
+/// # Arguments
+/// * `id` - The parachain identifier.
+/// * `command` - The command specified.
+/// * `version` - The version of the parachain binary to be used.
+/// * `relay_chain` - The version of the relay chain binary being used.
+/// * `cache` - The cache to be used.
+pub(super) async fn system(
+	id: u32,
+	command: &str,
+	version: Option<&str>,
+	relay_chain: &str,
+	cache: &Path,
+) -> Result<Option<super::Parachain>, Error> {
+	let para = &Parachain::System;
+	let name = para.binary();
+	if command != name {
+		return Ok(None);
+	}
+	let tag = match version {
+		Some(version) => Some(version.to_string()),
+		None => {
+			// Default to same version as relay chain when not explicitly specified
+			let version = relay_chain.to_string();
+			Some(version)
+		},
+	};
+	let source = TryInto::try_into(para, tag, para.releases().await?.into_iter().nth(0))?;
+	let binary = Binary::Source { name: name.to_string(), source, cache: cache.to_path_buf() };
+	return Ok(Some(super::Parachain { id, binary }));
+}
+
+/// Initialises the configuration required to launch a parachain.
+///
+/// # Arguments
+/// * `id` - The parachain identifier.
+/// * `command` - The command specified.
+/// * `version` - The version of the parachain binary to be used.
+/// * `cache` - The cache to be used.
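+// Illustrative usage (hypothetical values, mirroring `pop_works` in the tests below):
+// `from(2000, "pop-node", Some("v1.0"), &cache).await?` resolves the `pop-node` release archive
+// for parachain 2000, defaulting to the latest available release when no version is specified.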
+pub(super) async fn from(
+	id: u32,
+	command: &str,
+	version: Option<&str>,
+	cache: &Path,
+) -> Result<Option<super::Parachain>, Error> {
+	for para in Parachain::VARIANTS.iter().filter(|p| p.binary() == command) {
+		let releases = para.releases().await?;
+		let tag = Binary::resolve_version(command, version, &releases, cache);
+		let binary = Binary::Source {
+			name: para.binary().to_string(),
+			source: TryInto::try_into(para, tag, releases.iter().nth(0).map(|v| v.to_string()))?,
+			cache: cache.to_path_buf(),
+		};
+		return Ok(Some(super::Parachain { id, binary }));
+	}
+	Ok(None)
+}
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+	use tempfile::tempdir;
+
+	#[tokio::test]
+	async fn system_matches_command() -> anyhow::Result<()> {
+		assert!(system(1000, "polkadot", None, "v1.12.0", tempdir()?.path()).await?.is_none());
+		Ok(())
+	}
+
+	#[tokio::test]
+	async fn system_using_relay_version() -> anyhow::Result<()> {
+		let version = "v1.12.0";
+		let expected = Parachain::System;
+		let para_id = 1000;
+
+		let temp_dir = tempdir()?;
+		let parachain = system(para_id, expected.binary(), None, version, temp_dir.path())
+			.await?
+			.unwrap();
+		assert_eq!(para_id, parachain.id);
+		assert!(matches!(parachain.binary, Binary::Source { name, source, cache }
+			if name == expected.binary() && source == Source::GitHub(ReleaseArchive {
+				owner: "r0gue-io".to_string(),
+				repository: "polkadot".to_string(),
+				tag: Some(version.to_string()),
+				tag_format: Some("polkadot-{tag}".to_string()),
+				archive: format!("{name}-{}.tar.gz", target()?),
+				contents: vec![expected.binary()],
+				latest: parachain.binary.latest().map(|l| l.to_string()),
+			}) && cache == temp_dir.path()
+		));
+		Ok(())
+	}
+
+	#[tokio::test]
+	async fn system_works() -> anyhow::Result<()> {
+		let version = "v1.12.0";
+		let expected = Parachain::System;
+		let para_id = 1000;
+
+		let temp_dir = tempdir()?;
+		let parachain = system(para_id, expected.binary(), Some(version), version, temp_dir.path())
+			.await?
+ .unwrap(); + assert_eq!(para_id, parachain.id); + assert!(matches!(parachain.binary, Binary::Source { name, source, cache } + if name == expected.binary() && source == Source::GitHub(ReleaseArchive { + owner: "r0gue-io".to_string(), + repository: "polkadot".to_string(), + tag: Some(version.to_string()), + tag_format: Some("polkadot-{tag}".to_string()), + archive: format!("{name}-{}.tar.gz", target()?), + contents: vec![expected.binary()], + latest: parachain.binary.latest().map(|l| l.to_string()), + }) && cache == temp_dir.path() + )); + Ok(()) + } + + #[tokio::test] + async fn pop_works() -> anyhow::Result<()> { + let version = "v1.0"; + let expected = Parachain::Pop; + let para_id = 2000; + + let temp_dir = tempdir()?; + let parachain = + from(para_id, expected.binary(), Some(version), temp_dir.path()).await?.unwrap(); + assert_eq!(para_id, parachain.id); + assert!(matches!(parachain.binary, Binary::Source { name, source, cache } + if name == expected.binary() && source == Source::GitHub(ReleaseArchive { + owner: "r0gue-io".to_string(), + repository: "pop-node".to_string(), + tag: Some(version.to_string()), + tag_format: None, + archive: format!("{name}-{}.tar.gz", target()?), + contents: vec![expected.binary()], + latest: parachain.binary.latest().map(|l| l.to_string()), + }) && cache == temp_dir.path() + )); + Ok(()) + } + + #[tokio::test] + async fn from_handles_unsupported_command() -> anyhow::Result<()> { + assert!(from(2000, "none", None, tempdir()?.path()).await?.is_none()); + Ok(()) + } +} diff --git a/crates/pop-parachains/src/up/relay.rs b/crates/pop-parachains/src/up/relay.rs new file mode 100644 index 000000000..7f51a7cb6 --- /dev/null +++ b/crates/pop-parachains/src/up/relay.rs @@ -0,0 +1,159 @@ +// SPDX-License-Identifier: GPL-3.0 +use super::{ + sourcing, + sourcing::{ + traits::{Source as _, *}, + GitHub::ReleaseArchive, + Source, + }, + target, Binary, Error, +}; +use std::{iter::once, path::Path}; +use strum::VariantArray as _; +use strum_macros::{EnumProperty, VariantArray}; + +/// A supported relay chain. +#[derive(Debug, EnumProperty, PartialEq, VariantArray)] +pub(super) enum RelayChain { + /// Polkadot. + #[strum(props( + Repository = "https://github.com/r0gue-io/polkadot", + Binary = "polkadot", + TagFormat = "polkadot-{tag}", + Fallback = "v1.12.0" + ))] + Polkadot, +} + +impl TryInto for &RelayChain { + /// Attempt the conversion. + /// + /// # Arguments + /// * `tag` - If applicable, a tag used to determine a specific release. + /// * `latest` - If applicable, some specifier used to determine the latest source. + fn try_into(&self, tag: Option, latest: Option) -> Result { + Ok(match self { + RelayChain::Polkadot => { + // Source from GitHub release asset + let repo = crate::GitHub::parse(self.repository())?; + Source::GitHub(ReleaseArchive { + owner: repo.org, + repository: repo.name, + tag, + tag_format: self.tag_format().map(|t| t.into()), + archive: format!("{}-{}.tar.gz", self.binary(), target()?), + contents: once(self.binary()).chain(self.workers()).collect(), + latest, + }) + }, + }) + } +} + +impl RelayChain { + /// The additional worker binaries required for the relay chain. + fn workers(&self) -> [&'static str; 2] { + ["polkadot-execute-worker", "polkadot-prepare-worker"] + } +} + +impl sourcing::traits::Source for RelayChain {} + +/// Initialises the configuration required to launch the relay chain. +/// +/// # Arguments +/// * `version` - The version of the relay chain binary to be used. +/// * `cache` - The cache to be used. 
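+// Illustrative usage (hypothetical values, mirroring `default_works` in the tests below):
+// `default(Some("v1.12.0"), &cache).await?` sources `polkadot` along with its
+// `polkadot-execute-worker` and `polkadot-prepare-worker` binaries from the release archive.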
+pub(super) async fn default( + version: Option<&str>, + cache: &Path, +) -> Result { + from(RelayChain::Polkadot.binary(), version, cache).await +} + +/// Initialises the configuration required to launch the relay chain using the specified command. +/// +/// # Arguments +/// * `command` - The command specified. +/// * `version` - The version of the binary to be used. +/// * `cache` - The cache to be used. +pub(super) async fn from( + command: &str, + version: Option<&str>, + cache: &Path, +) -> Result { + for relay in RelayChain::VARIANTS + .iter() + .filter(|r| command.to_lowercase().ends_with(r.binary())) + { + let name = relay.binary(); + let releases = relay.releases().await?; + let tag = Binary::resolve_version(name, version, &releases, cache); + let binary = Binary::Source { + name: name.to_string(), + source: TryInto::try_into(&relay, tag, releases.iter().nth(0).map(|v| v.to_string()))?, + cache: cache.to_path_buf(), + }; + return Ok(super::RelayChain { binary, workers: relay.workers() }); + } + return Err(Error::UnsupportedCommand(format!( + "the relay chain command is unsupported: {command}", + ))); +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::tempdir; + + #[tokio::test] + async fn default_works() -> anyhow::Result<()> { + let expected = RelayChain::Polkadot; + let version = "v1.12.0"; + let temp_dir = tempdir()?; + let relay = default(Some(version), temp_dir.path()).await?; + assert!(matches!(relay.binary, Binary::Source { name, source, cache } + if name == expected.binary() && source == Source::GitHub(ReleaseArchive { + owner: "r0gue-io".to_string(), + repository: "polkadot".to_string(), + tag: Some(version.to_string()), + tag_format: Some("polkadot-{tag}".to_string()), + archive: format!("{name}-{}.tar.gz", target()?), + contents: vec!["polkadot", "polkadot-execute-worker", "polkadot-prepare-worker"], + latest: relay.binary.latest().map(|l| l.to_string()), + }) && cache == temp_dir.path() + )); + assert_eq!(relay.workers, expected.workers()); + Ok(()) + } + + #[tokio::test] + async fn from_handles_unsupported_command() -> anyhow::Result<()> { + assert!( + matches!(from("none", None, tempdir()?.path()).await, Err(Error::UnsupportedCommand(e)) + if e == "the relay chain command is unsupported: none") + ); + Ok(()) + } + + #[tokio::test] + async fn from_handles_local_command() -> anyhow::Result<()> { + let expected = RelayChain::Polkadot; + let version = "v1.12.0"; + let temp_dir = tempdir()?; + let relay = from("./bin-v1.6.0/polkadot", Some(version), temp_dir.path()).await?; + assert!(matches!(relay.binary, Binary::Source { name, source, cache } + if name == expected.binary() && source == Source::GitHub(ReleaseArchive { + owner: "r0gue-io".to_string(), + repository: "polkadot".to_string(), + tag: Some(version.to_string()), + tag_format: Some("polkadot-{tag}".to_string()), + archive: format!("{name}-{}.tar.gz", target()?), + contents: vec!["polkadot", "polkadot-execute-worker", "polkadot-prepare-worker"], + latest: relay.binary.latest().map(|l| l.to_string()), + }) && cache == temp_dir.path() + )); + assert_eq!(relay.workers, expected.workers()); + Ok(()) + } +} diff --git a/crates/pop-parachains/src/up/sourcing.rs b/crates/pop-parachains/src/up/sourcing.rs new file mode 100644 index 000000000..5e0ef8e53 --- /dev/null +++ b/crates/pop-parachains/src/up/sourcing.rs @@ -0,0 +1,894 @@ +// SPDX-License-Identifier: GPL-3.0 +use crate::{Error, Git, Status, APP_USER_AGENT}; +use duct::cmd; +use flate2::read::GzDecoder; +use reqwest::StatusCode; +use 
std::time::Duration;
+use std::{
+	fs::{copy, metadata, read_dir, rename, File},
+	io::{BufRead, Seek, SeekFrom, Write},
+	os::unix::fs::PermissionsExt,
+	path::{Path, PathBuf},
+};
+use tar::Archive;
+use tempfile::{tempdir, tempfile};
+use url::Url;
+
+/// The source of a binary.
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) enum Source {
+	/// An archive for download.
+	#[allow(dead_code)]
+	Archive {
+		/// The url of the archive.
+		url: String,
+		/// The archive contents required, including the binary name.
+		contents: Vec<String>,
+	},
+	/// A git repository.
+	Git {
+		/// The url of the repository.
+		url: Url,
+		/// If applicable, the branch, tag or commit.
+		reference: Option<String>,
+		/// If applicable, a specification of the path to the manifest.
+		manifest: Option<PathBuf>,
+		/// The name of the package to be built.
+		package: String,
+		/// Any additional build artifacts which are required.
+		artifacts: Vec<String>,
+	},
+	/// A GitHub repository.
+	GitHub(GitHub),
+	/// A URL for download.
+	#[allow(dead_code)]
+	Url {
+		/// The URL for download.
+		url: String,
+		/// The name of the binary.
+		name: String,
+	},
+}
+
+impl Source {
+	/// Sources the binary.
+	///
+	/// # Arguments
+	///
+	/// * `cache` - the cache to be used.
+	/// * `release` - whether any binaries needing to be built should be done so using the release profile.
+	/// * `status` - used to observe status updates.
+	/// * `verbose` - whether verbose output is required.
+	pub(super) async fn source(
+		&self,
+		cache: &Path,
+		release: bool,
+		status: &impl Status,
+		verbose: bool,
+	) -> Result<(), Error> {
+		use Source::*;
+		match self {
+			Archive { url, contents } => {
+				let contents: Vec<_> =
+					contents.iter().map(|name| (name.as_str(), cache.join(&name))).collect();
+				from_archive(&url, &contents, status).await
+			},
+			Git { url, reference, manifest, package, artifacts } => {
+				let artifacts: Vec<_> = artifacts
+					.iter()
+					.map(|name| match reference {
+						Some(version) => (name.as_str(), cache.join(&format!("{name}-{version}"))),
+						None => (name.as_str(), cache.join(&name)),
+					})
+					.collect();
+				from_git(
+					url.as_str(),
+					reference.as_deref(),
+					manifest.as_ref(),
+					package,
+					&artifacts,
+					release,
+					status,
+					verbose,
+				)
+				.await
+			},
+			GitHub(source) => source.source(cache, release, status, verbose).await,
+			Url { url, name } => from_url(url, &cache.join(name), status).await,
+		}
+	}
+}
+
+/// A binary sourced from GitHub.
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) enum GitHub {
+	/// An archive for download from a GitHub release.
+	ReleaseArchive {
+		/// The owner of the repository - i.e. https://github.com/{owner}/repository.
+		owner: String,
+		/// The name of the repository - i.e. https://github.com/owner/{repository}.
+		repository: String,
+		/// The release tag to be used, where `None` is latest.
+		tag: Option<String>,
+		/// If applicable, any formatting for the release tag.
+		tag_format: Option<String>,
+		/// The name of the archive (asset) to download.
+		archive: String,
+		/// The archive contents required, including the binary name.
+		contents: Vec<&'static str>,
+		/// If applicable, the latest release tag available.
+		latest: Option<String>,
+	},
+	/// A source code archive for download from GitHub.
+	SourceCodeArchive {
+		/// The owner of the repository - i.e. https://github.com/{owner}/repository.
+		owner: String,
+		/// The name of the repository - i.e. https://github.com/owner/{repository}.
+		repository: String,
+		/// If applicable, the branch, tag or commit.
+		reference: Option<String>,
+		/// If applicable, a specification of the path to the manifest.
+ manifest: Option, + /// The name of the package to be built. + package: String, + /// Any additional artifacts which are required. + artifacts: Vec, + }, +} + +impl GitHub { + /// Sources the binary. + /// + /// # Arguments + /// + /// * `cache` - the cache to be used. + /// * `release` - whether any binaries needing to be built should be done so using the release profile. + /// * `status` - used to observe status updates. + /// * `verbose` - whether verbose output is required. + async fn source( + &self, + cache: &Path, + release: bool, + status: &impl Status, + verbose: bool, + ) -> Result<(), Error> { + use GitHub::*; + match self { + ReleaseArchive { owner, repository, tag, tag_format, archive, contents, .. } => { + // Complete url and contents based on tag + let base_url = format!("https://github.com/{owner}/{repository}/releases"); + let url = match tag.as_ref() { + Some(tag) => { + let tag = tag_format.as_ref().map_or_else( + || tag.to_string(), + |tag_format| tag_format.replace("{tag}", tag), + ); + format!("{base_url}/download/{tag}/{archive}") + }, + None => format!("{base_url}/latest/download/{archive}"), + }; + let contents: Vec<_> = contents + .iter() + .map(|name| match tag.as_ref() { + Some(tag) => (*name, cache.join(&format!("{name}-{tag}"))), + None => (*name, cache.join(&name)), + }) + .collect(); + from_archive(&url, &contents, status).await + }, + SourceCodeArchive { owner, repository, reference, manifest, package, artifacts } => { + let artifacts: Vec<_> = artifacts + .iter() + .map(|name| match reference { + Some(reference) => { + (name.as_str(), cache.join(&format!("{name}-{reference}"))) + }, + None => (name.as_str(), cache.join(&name)), + }) + .collect(); + from_github_archive( + owner, + repository, + reference.as_ref().map(|r| r.as_str()), + manifest.as_ref(), + package, + &artifacts, + release, + status, + verbose, + ) + .await + }, + } + } +} + +/// Source binary by downloading and extracting from an archive. +/// +/// # Arguments +/// * `url` - The url of the archive. +/// * `contents` - The contents within the archive which are required. +/// * `status` - Used to observe status updates. +async fn from_archive( + url: &str, + contents: &[(&str, PathBuf)], + status: &impl Status, +) -> Result<(), Error> { + // Download archive + status.update(&format!("Downloading from {url}...")); + let response = reqwest::get(url).await?.error_for_status()?; + let mut file = tempfile()?; + file.write_all(&response.bytes().await?)?; + file.seek(SeekFrom::Start(0))?; + // Extract contents + status.update("Extracting from archive..."); + let tar = GzDecoder::new(file); + let mut archive = Archive::new(tar); + let temp_dir = tempdir()?; + let working_dir = temp_dir.path(); + archive.unpack(working_dir)?; + for (name, dest) in contents { + rename(working_dir.join(name), dest)?; + } + status.update("Sourcing complete."); + Ok(()) +} + +/// Source binary by cloning a git repository and then building. +/// +/// # Arguments +/// * `url` - The url of the repository. +/// * `reference` - If applicable, the branch, tag or commit. +/// * `manifest` - If applicable, a specification of the path to the manifest. +/// * `package` - The name of the package to be built. +/// * `artifacts` - Any additional artifacts which are required. +/// * `release` - Whether to build optimized artifacts using the release profile. +/// * `status` - Used to observe status updates. +/// * `verbose` - Whether verbose output is required. 
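+// Illustrative call (fixture values taken from `from_git_works` in the tests below, `status`
+// being any `Status` implementation):
+// `from_git("https://github.com/hpaluch/rust-hello-world", Some("<commit>"), None::<&Path>,
+// "hello_world", &[("hello_world", cache.join("hello_world"))], true, &status, false).await?`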
+async fn from_git(
+	url: &str,
+	reference: Option<&str>,
+	manifest: Option<impl AsRef<Path>>,
+	package: &str,
+	artifacts: &[(&str, impl AsRef<Path>)],
+	release: bool,
+	status: &impl Status,
+	verbose: bool,
+) -> Result<(), Error> {
+	// Clone repository into working directory
+	let temp_dir = tempdir()?;
+	let working_dir = temp_dir.path();
+	status.update(&format!("Cloning {url}..."));
+	Git::clone(&Url::parse(url)?, &working_dir, reference.as_deref())?;
+	// Build binaries
+	status.update("Starting build of binary...");
+	let manifest = manifest
+		.as_ref()
+		.map_or_else(|| working_dir.join("Cargo.toml"), |m| working_dir.join(m));
+	build(manifest, package, artifacts, release, status, verbose).await?;
+	status.update("Sourcing complete.");
+	Ok(())
+}
+
+/// Source binary by downloading from a source code archive and then building.
+///
+/// # Arguments
+/// * `owner` - The owner of the repository.
+/// * `repository` - The name of the repository.
+/// * `reference` - If applicable, the branch, tag or commit.
+/// * `manifest` - If applicable, a specification of the path to the manifest.
+/// * `package` - The name of the package to be built.
+/// * `artifacts` - Any additional artifacts which are required.
+/// * `release` - Whether to build optimized artifacts using the release profile.
+/// * `status` - Used to observe status updates.
+/// * `verbose` - Whether verbose output is required.
+async fn from_github_archive(
+	owner: &str,
+	repository: &str,
+	reference: Option<&str>,
+	manifest: Option<impl AsRef<Path>>,
+	package: &str,
+	artifacts: &[(&str, impl AsRef<Path>)],
+	release: bool,
+	status: &impl Status,
+	verbose: bool,
+) -> Result<(), Error> {
+	// User agent required when using GitHub API
+	let client = reqwest::ClientBuilder::new().user_agent(APP_USER_AGENT).build()?;
+	let response =
+		match reference {
+			Some(reference) => {
+				// Various potential urls to try based on not knowing the type of ref
+				let urls = [
+					format!("https://github.com/{owner}/{repository}/archive/refs/heads/{reference}.tar.gz"),
+					format!("https://github.com/{owner}/{repository}/archive/refs/tags/{reference}.tar.gz"),
+					format!("https://github.com/{owner}/{repository}/archive/{reference}.tar.gz"),
+				];
+				let mut response = None;
+				for url in urls {
+					status.update(&format!("Downloading from {url}..."));
+					response = Some(client.get(url).send().await?.error_for_status());
+					if let Some(Err(e)) = &response {
+						if e.status() == Some(StatusCode::NOT_FOUND) {
+							tokio::time::sleep(Duration::from_secs(1)).await;
+							continue;
+						}
+					}
+					break;
+				}
+				response.expect("value set above")?
+			},
+			None => {
+				let url = format!("https://api.github.com/repos/{owner}/{repository}/tarball");
+				status.update(&format!("Downloading from {url}..."));
+				client.get(url).send().await?.error_for_status()?
+ }, + }; + let mut file = tempfile()?; + file.write_all(&response.bytes().await?)?; + file.seek(SeekFrom::Start(0))?; + // Extract contents + status.update("Extracting from archive..."); + let tar = GzDecoder::new(file); + let mut archive = Archive::new(tar); + let temp_dir = tempdir()?; + let mut working_dir = temp_dir.path().into(); + archive.unpack(&working_dir)?; + // Prepare archive contents for build + let entries: Vec<_> = read_dir(&working_dir)?.take(2).filter_map(|x| x.ok()).collect(); + match entries.len() { + 0 => { + return Err(Error::ArchiveError( + "The downloaded archive does not contain any entries.".into(), + )) + }, + 1 => working_dir = entries[0].path(), // Automatically switch to top level directory + _ => {}, // Assume that downloaded archive does not have a top level directory + } + // Build binaries + status.update("Starting build of binary..."); + let manifest = manifest + .as_ref() + .map_or_else(|| working_dir.join("Cargo.toml"), |m| working_dir.join(m)); + build(&manifest, package, artifacts, release, status, verbose).await?; + status.update("Sourcing complete."); + Ok(()) +} + +/// Source binary by building a local package. +/// +/// # Arguments +/// * `manifest` - The path to the local package manifest. +/// * `package` - The name of the package to be built. +/// * `release` - Whether to build optimized artifacts using the release profile. +/// * `status` - Used to observe status updates. +/// * `verbose` - Whether verbose output is required. +pub(crate) async fn from_local_package( + manifest: &Path, + package: &str, + release: bool, + status: &impl Status, + verbose: bool, +) -> Result<(), Error> { + // Build binaries + status.update("Starting build of binary..."); + const EMPTY: [(&str, PathBuf); 0] = []; + build(manifest, package, &EMPTY, release, status, verbose).await?; + status.update("Sourcing complete."); + Ok(()) +} + +/// Source binary by downloading from a url. +/// +/// # Arguments +/// * `url` - The url of the binary. +/// * `path` - The (local) destination path. +/// * `status` - Used to observe status updates. +async fn from_url(url: &str, path: &Path, status: &impl Status) -> Result<(), Error> { + // Download required version of binaries + status.update(&format!("Downloading from {url}...")); + download(url, path).await?; + status.update("Sourcing complete."); + Ok(()) +} + +/// Builds a package. +/// +/// # Arguments +/// * `manifest` - The path to the manifest. +/// * `package` - The name of the package to be built. +/// * `artifacts` - Any additional artifacts which are required. +/// * `release` - Whether to build optimized artifacts using the release profile. +/// * `status` - Used to observe status updates. +/// * `verbose` - Whether verbose output is required. 
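+// Roughly equivalent to running `cargo build -p <package> --manifest-path <manifest> [--release]`
+// and then copying each required artifact from `target/{release|debug}` to its destination path.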
+async fn build( + manifest: impl AsRef, + package: &str, + artifacts: &[(&str, impl AsRef)], + release: bool, + status: &impl Status, + verbose: bool, +) -> Result<(), Error> { + // Define arguments + let manifest_path = manifest.as_ref().to_str().expect("expected manifest path to be valid"); + let mut args = vec!["build", "-p", package, "--manifest-path", manifest_path]; + if release { + args.push("--release") + } + // Build binaries + let command = cmd("cargo", args); + match verbose { + false => { + let reader = command.stderr_to_stdout().reader()?; + let mut output = std::io::BufReader::new(reader).lines(); + while let Some(line) = output.next() { + status.update(&line?); + } + }, + true => { + command.run()?; + }, + } + // Copy required artifacts to destination path + let target = manifest + .as_ref() + .parent() + .expect("") + .join(format!("target/{}", if release { "release" } else { "debug" })); + for (name, dest) in artifacts { + copy(target.join(&name), dest)?; + } + Ok(()) +} + +/// Downloads a file from a URL. +/// +/// # Arguments +/// * `url` - The url of the file. +/// * `path` - The (local) destination path. +async fn download(url: &str, dest: &Path) -> Result<(), Error> { + // Download to destination path + let response = reqwest::get(url).await?.error_for_status()?; + let mut file = File::create(&dest)?; + file.write_all(&response.bytes().await?)?; + // Make executable + let mut perms = metadata(dest)?.permissions(); + perms.set_mode(0o755); + std::fs::set_permissions(dest, perms)?; + Ok(()) +} + +#[cfg(test)] +pub(super) mod tests { + use super::{super::target, GitHub::*, *}; + use tempfile::tempdir; + + #[tokio::test] + async fn sourcing_from_archive_works() -> anyhow::Result<()> { + let url = "https://github.com/r0gue-io/polkadot/releases/latest/download/polkadot-aarch64-apple-darwin.tar.gz".to_string(); + let name = "polkadot".to_string(); + let contents = + vec![name.clone(), "polkadot-execute-worker".into(), "polkadot-prepare-worker".into()]; + let temp_dir = tempdir()?; + + Source::Archive { url, contents: contents.clone() } + .source(temp_dir.path(), true, &Output, true) + .await?; + for item in contents { + assert!(temp_dir.path().join(item).exists()); + } + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_git_works() -> anyhow::Result<()> { + let url = Url::parse("https://github.com/hpaluch/rust-hello-world")?; + let package = "hello_world".to_string(); + let temp_dir = tempdir()?; + + Source::Git { + url, + reference: None, + manifest: None, + package: package.clone(), + artifacts: vec![package.clone()], + } + .source(temp_dir.path(), true, &Output, true) + .await?; + assert!(temp_dir.path().join(package).exists()); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_git_ref_works() -> anyhow::Result<()> { + let url = Url::parse("https://github.com/hpaluch/rust-hello-world")?; + let initial_commit = "436b7dbffdfaaf7ad90bf44ae8fdcb17eeee65a3".to_string(); + let package = "hello_world".to_string(); + let temp_dir = tempdir()?; + + Source::Git { + url, + reference: Some(initial_commit.clone()), + manifest: None, + package: package.clone(), + artifacts: vec![package.clone()], + } + .source(temp_dir.path(), true, &Output, true) + .await?; + assert!(temp_dir.path().join(format!("{package}-{initial_commit}")).exists()); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_github_release_archive_works() -> anyhow::Result<()> { + let owner = "r0gue-io".to_string(); + let repository = "polkadot".to_string(); + let tag = "v1.12.0"; + let tag_format = 
Some("polkadot-{tag}".to_string()); + let name = "polkadot".to_string(); + let archive = format!("{name}-{}.tar.gz", target()?); + let contents = ["polkadot", "polkadot-execute-worker", "polkadot-prepare-worker"]; + let temp_dir = tempdir()?; + + Source::GitHub(ReleaseArchive { + owner, + repository, + tag: Some(tag.to_string()), + tag_format, + archive, + contents: contents.to_vec(), + latest: None, + }) + .source(temp_dir.path(), true, &Output, true) + .await?; + for item in contents { + assert!(temp_dir.path().join(format!("{item}-{tag}")).exists()); + } + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_latest_github_release_archive_works() -> anyhow::Result<()> { + let owner = "r0gue-io".to_string(); + let repository = "polkadot".to_string(); + let tag_format = Some("polkadot-{tag}".to_string()); + let name = "polkadot".to_string(); + let archive = format!("{name}-{}.tar.gz", target()?); + let contents = ["polkadot", "polkadot-execute-worker", "polkadot-prepare-worker"]; + let temp_dir = tempdir()?; + + Source::GitHub(ReleaseArchive { + owner, + repository, + tag: None, + tag_format, + archive, + contents: contents.to_vec(), + latest: None, + }) + .source(temp_dir.path(), true, &Output, true) + .await?; + for item in contents { + assert!(temp_dir.path().join(item).exists()); + } + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_github_source_code_archive_works() -> anyhow::Result<()> { + let owner = "paritytech".to_string(); + let repository = "polkadot-sdk".to_string(); + let package = "polkadot".to_string(); + let temp_dir = tempdir()?; + let initial_commit = "72dba98250a6267c61772cd55f8caf193141050f"; + let manifest = PathBuf::from("substrate/Cargo.toml"); + + Source::GitHub(SourceCodeArchive { + owner, + repository, + reference: Some(initial_commit.to_string()), + manifest: Some(manifest), + package: package.clone(), + artifacts: vec![package.clone()], + }) + .source(temp_dir.path(), true, &Output, true) + .await?; + assert!(temp_dir.path().join(format!("{package}-{initial_commit}")).exists()); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_latest_github_source_code_archive_works() -> anyhow::Result<()> { + let owner = "hpaluch".to_string(); + let repository = "rust-hello-world".to_string(); + let package = "hello_world".to_string(); + let temp_dir = tempdir()?; + + Source::GitHub(SourceCodeArchive { + owner, + repository, + reference: None, + manifest: None, + package: package.clone(), + artifacts: vec![package.clone()], + }) + .source(temp_dir.path(), true, &Output, true) + .await?; + assert!(temp_dir.path().join(package).exists()); + Ok(()) + } + + #[tokio::test] + async fn sourcing_from_url_works() -> anyhow::Result<()> { + let url = + "https://github.com/paritytech/polkadot-sdk/releases/latest/download/polkadot.asc" + .to_string(); + let name = "polkadot"; + let temp_dir = tempdir()?; + + Source::Url { url, name: name.into() } + .source(temp_dir.path(), false, &Output, true) + .await?; + assert!(temp_dir.path().join(&name).exists()); + Ok(()) + } + + #[tokio::test] + async fn from_archive_works() -> anyhow::Result<()> { + let temp_dir = tempdir()?; + let url = "https://github.com/r0gue-io/polkadot/releases/latest/download/polkadot-aarch64-apple-darwin.tar.gz"; + let contents: Vec<_> = ["polkadot", "polkadot-execute-worker", "polkadot-prepare-worker"] + .into_iter() + .map(|b| (b, temp_dir.path().join(b))) + .collect(); + + from_archive(url, &contents, &Output).await?; + for (_, file) in contents { + assert!(file.exists()); + } + Ok(()) + } + + 
#[tokio::test] + async fn from_git_works() -> anyhow::Result<()> { + let url = "https://github.com/hpaluch/rust-hello-world"; + let package = "hello_world"; + let initial_commit = "436b7dbffdfaaf7ad90bf44ae8fdcb17eeee65a3"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(package); + + from_git( + url, + Some(initial_commit), + None::<&Path>, + &package, + &[(&package, &path)], + true, + &Output, + false, + ) + .await?; + assert!(path.exists()); + Ok(()) + } + + #[tokio::test] + async fn from_github_archive_works() -> anyhow::Result<()> { + let owner = "paritytech"; + let repository = "polkadot-sdk"; + let package = "polkadot"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(package); + let initial_commit = "72dba98250a6267c61772cd55f8caf193141050f"; + let manifest = "substrate/Cargo.toml"; + + from_github_archive( + owner, + repository, + Some(initial_commit), + Some(manifest), + package, + &[(package, &path)], + true, + &Output, + true, + ) + .await?; + assert!(path.exists()); + Ok(()) + } + + #[tokio::test] + async fn from_latest_github_archive_works() -> anyhow::Result<()> { + let owner = "hpaluch"; + let repository = "rust-hello-world"; + let package = "hello_world"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join(package); + + from_github_archive( + owner, + repository, + None, + None::<&Path>, + package, + &[(package, &path)], + true, + &Output, + true, + ) + .await?; + assert!(path.exists()); + Ok(()) + } + + #[tokio::test] + async fn from_local_package_works() -> anyhow::Result<()> { + let temp_dir = tempdir()?; + let name = "hello_world"; + cmd("cargo", ["new", name, "--bin"]).dir(temp_dir.path()).run()?; + let manifest = temp_dir.path().join(name).join("Cargo.toml"); + + from_local_package(&manifest, name, false, &Output, true).await?; + assert!(manifest.parent().unwrap().join("target/debug").join(name).exists()); + Ok(()) + } + + #[tokio::test] + async fn from_url_works() -> anyhow::Result<()> { + let url = + "https://github.com/paritytech/polkadot-sdk/releases/latest/download/polkadot.asc"; + let temp_dir = tempdir()?; + let path = temp_dir.path().join("polkadot"); + + from_url(url, &path, &Output).await?; + assert!(path.exists()); + assert_ne!(metadata(path)?.permissions().mode() & 0o755, 0); + Ok(()) + } + + pub(crate) struct Output; + impl Status for Output { + fn update(&self, status: &str) { + println!("{status}") + } + } +} + +pub(crate) mod traits { + use crate::Error; + use strum::EnumProperty; + + pub(crate) trait Source: EnumProperty { + /// The name of the binary. + fn binary(&self) -> &'static str { + self.get_str("Binary").expect("expected specification of `Binary` name") + } + + /// The fallback version to be used when the latest version cannot be determined. + fn fallback(&self) -> &str { + self.get_str("Fallback") + .expect("expected specification of `Fallback` release tag") + } + + /// Whether pre-releases are to be used. + fn prerelease(&self) -> Option { + self.get_str("Prerelease") + .map(|v| v.parse().expect("expected parachain prerelease value to be true/false")) + } + + /// Determine the available releases from the source. 
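+		// For example, with `TagFormat = "polkadot-{tag}"` a release tagged `polkadot-v1.12.0` is
+		// reported as `v1.12.0`; if the releases cannot be fetched, the `Fallback` tag is returned.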
+ async fn releases(&self) -> Result, Error> { + let repo = crate::GitHub::parse(self.repository())?; + let releases = match repo.releases().await { + Ok(releases) => releases, + Err(_) => return Ok(vec![self.fallback().to_string()]), + }; + let prerelease = self.prerelease(); + let tag_format = self.tag_format(); + Ok(releases + .iter() + .filter(|r| match prerelease { + None => !r.prerelease, // Exclude pre-releases by default + Some(prerelease) => r.prerelease == prerelease, + }) + .map(|r| { + if let Some(tag_format) = tag_format { + // simple for now, could be regex in future + let tag_format = tag_format.replace("{tag}", ""); + r.tag_name.replace(&tag_format, "") + } else { + r.tag_name.clone() + } + }) + .collect()) + } + + /// The repository to be used. + fn repository(&self) -> &str { + self.get_str("Repository").expect("expected specification of `Repository` url") + } + + /// If applicable, any tag format to be used - e.g. `polkadot-{tag}`. + fn tag_format(&self) -> Option<&str> { + self.get_str("TagFormat") + } + } + + /// An attempted conversion into a Source. + pub(crate) trait TryInto { + /// Attempt the conversion. + /// + /// # Arguments + /// * `specifier` - If applicable, some specifier used to determine a specific source. + /// * `latest` - If applicable, some specifier used to determine the latest source. + fn try_into( + &self, + specifier: Option, + latest: Option, + ) -> Result; + } + + #[cfg(test)] + mod tests { + use super::Source; + use strum_macros::{EnumProperty, VariantArray}; + + #[derive(EnumProperty, VariantArray)] + pub(super) enum Chain { + #[strum(props( + Repository = "https://github.com/paritytech/polkadot-sdk", + Binary = "polkadot", + Prerelease = "false", + Fallback = "v1.12.0", + TagFormat = "polkadot-{tag}" + ))] + Polkadot, + #[strum(props( + Repository = "https://github.com/r0gue-io/fallback", + Fallback = "v1.0" + ))] + Fallback, + } + + impl Source for Chain {} + + #[test] + fn binary_works() { + assert_eq!("polkadot", Chain::Polkadot.binary()) + } + + #[test] + fn fallback_works() { + assert_eq!("v1.12.0", Chain::Polkadot.fallback()) + } + + #[test] + fn prerelease_works() { + assert!(!Chain::Polkadot.prerelease().unwrap()) + } + + #[tokio::test] + async fn releases_works() -> anyhow::Result<()> { + assert!(!Chain::Polkadot.releases().await?.is_empty()); + Ok(()) + } + + #[tokio::test] + async fn releases_uses_fallback() -> anyhow::Result<()> { + let chain = Chain::Fallback; + assert_eq!(chain.fallback(), chain.releases().await?[0]); + Ok(()) + } + + #[test] + fn repository_works() { + assert_eq!("https://github.com/paritytech/polkadot-sdk", Chain::Polkadot.repository()) + } + + #[test] + fn tag_format_works() { + assert_eq!("polkadot-{tag}", Chain::Polkadot.tag_format().unwrap()) + } + } +} diff --git a/crates/pop-parachains/src/utils/git.rs b/crates/pop-parachains/src/utils/git.rs index d0ed22525..ad9004b55 100644 --- a/crates/pop-parachains/src/utils/git.rs +++ b/crates/pop-parachains/src/utils/git.rs @@ -12,36 +12,37 @@ use url::Url; /// A helper for handling Git operations. 
pub struct Git; impl Git { - pub(crate) fn clone(url: &Url, working_dir: &Path, branch: Option<&str>) -> Result<()> { - if !working_dir.exists() { - let mut fo = FetchOptions::new(); + pub(crate) fn clone(url: &Url, working_dir: &Path, reference: Option<&str>) -> Result<()> { + let mut fo = FetchOptions::new(); + if reference.is_none() { fo.depth(1); - let mut repo = RepoBuilder::new(); - repo.fetch_options(fo); - if let Some(branch) = branch { - repo.branch(branch); - } - if let Err(_e) = repo.clone(url.as_str(), working_dir) { - Self::ssh_clone(url, working_dir, branch)?; - } + } + let mut repo = RepoBuilder::new(); + repo.fetch_options(fo); + let repo = match repo.clone(url.as_str(), working_dir) { + Ok(repository) => repository, + Err(e) => match Self::ssh_clone(url, working_dir) { + Ok(repository) => repository, + Err(_) => return Err(e.into()), + }, + }; + + if let Some(reference) = reference { + let object = repo.revparse_single(&reference).expect("Object not found"); + repo.checkout_tree(&object, None).expect("Failed to checkout"); } Ok(()) } - pub(crate) fn ssh_clone(url: &Url, working_dir: &Path, branch: Option<&str>) -> Result<()> { + + fn ssh_clone(url: &Url, working_dir: &Path) -> Result { let ssh_url = GitHub::convert_to_ssh_url(url); - if !working_dir.exists() { - // Prepare callback and fetch options. - let mut fo = FetchOptions::new(); - Self::set_up_ssh_fetch_options(&mut fo)?; - // Prepare builder and clone. - let mut repo = RepoBuilder::new(); - repo.fetch_options(fo); - if let Some(branch) = branch { - repo.branch(branch); - } - repo.clone(&ssh_url, working_dir)?; - } - Ok(()) + // Prepare callback and fetch options. + let mut fo = FetchOptions::new(); + Self::set_up_ssh_fetch_options(&mut fo)?; + // Prepare builder and clone. + let mut repo = RepoBuilder::new(); + repo.fetch_options(fo); + Ok(repo.clone(&ssh_url, working_dir)?) } /// Clone a Git repository and degit it. @@ -186,7 +187,7 @@ impl GitHub { } /// Fetch the latest releases of the GitHub repository. - pub async fn get_latest_releases(&self) -> Result> { + pub async fn releases(&self) -> Result> { let client = reqwest::ClientBuilder::new().user_agent(APP_USER_AGENT).build()?; let url = self.api_releases_url(); let response = client.get(url).send().await?.error_for_status()?; @@ -257,6 +258,7 @@ impl GitHub { .ok_or(Error::Git("the repository name is missing from the github url".to_string()))?) } + #[cfg(test)] pub(crate) fn release(repo: &Url, tag: &str, artifact: &str) -> String { format!("{}/releases/download/{tag}/{artifact}", repo.as_str()) } @@ -324,7 +326,7 @@ mod tests { }]"#; let repo = GitHub::parse(BASE_PARACHAIN)?.with_api(&mock_server.url()); let mock = releases_mock(&mut mock_server, &repo, expected_payload).await; - let latest_release = repo.get_latest_releases().await?; + let latest_release = repo.releases().await?; assert_eq!( latest_release[0], Release {