diff --git a/crates/cli/src/commands/order/calldata.rs b/crates/cli/src/commands/order/calldata.rs index 7a0b57825..7cf3ff5ab 100644 --- a/crates/cli/src/commands/order/calldata.rs +++ b/crates/cli/src/commands/order/calldata.rs @@ -38,10 +38,10 @@ impl Execute for AddOrderCalldata { None => None, }; let order = DotrainOrder::new(dotrain, settings).await?; - let dotrain_string = order.dotrain.clone(); + let dotrain_string = order.dotrain().to_string(); let config_deployment = order - .config + .config() .deployments .get(&self.deployment) .ok_or(anyhow!("specified deployment is undefined!"))?; diff --git a/crates/cli/src/commands/order/filter.rs b/crates/cli/src/commands/order/filter.rs new file mode 100644 index 000000000..978f95a4b --- /dev/null +++ b/crates/cli/src/commands/order/filter.rs @@ -0,0 +1,179 @@ +use crate::execute::Execute; +use anyhow::{anyhow, Result}; +use clap::{ArgAction, Parser}; +use rain_orderbook_common::dotrain_order::DotrainOrder; +use std::fs::read_to_string; +use std::path::PathBuf; + +/// Generate a new .rain with unused frontmatter cleaned out, i.e. the frontmatter will only include the +/// specified deployments (and their related fields) from a given .rain and an optional settings.yml +#[derive(Parser, Clone)] +pub struct Filter { + /// Path to the .rain file + #[arg(short = 'f', long, value_name = "PATH")] + dotrain_file: PathBuf, + + /// Path to the settings yaml file + #[arg(short = 'c', long, value_name = "PATH")] + settings_file: Option<PathBuf>, + + /// List of deployment keys to include in the output .rain frontmatter + #[arg(short = 'e', long, value_name = "DEPLOYMENT", num_args = 1..)] + deployments: Vec<String>, + + /// Optional output file path to write the result into + #[arg(short = 'o', long, value_name = "PATH")] + pub output: Option<PathBuf>, + + /// Print the result on console (send result to stdout) + #[arg(long, action = ArgAction::SetTrue)] + pub stdout: bool, +} + +impl Execute for Filter { + async fn execute(&self) -> Result<()> { + // read input files + let dotrain = read_to_string(self.dotrain_file.clone()).map_err(|e| anyhow!(e))?; + let settings = match &self.settings_file { + Some(settings_file) => { + Some(read_to_string(settings_file.clone()).map_err(|e| anyhow!(e))?)
+ } + None => None, + }; + + // generate new dotrain order instance with cleaned up frontmatter + let order = DotrainOrder::new_with_frontmatter_filtered_by_deployment( + dotrain, + settings, + &self + .deployments + .iter() + .map(String::as_str) + .collect::>(), + ) + .await?; + + // handle output + if let Some(output) = &self.output { + std::fs::write(output, order.dotrain())?; + } + if self.stdout { + println!("{}", order.dotrain()); + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use clap::CommandFactory; + + #[test] + fn verify_cli() { + Filter::command().debug_assert(); + } + + #[tokio::test] + async fn test_execute_happy() { + let setting = r#" +networks: + some-network: + rpc: https://abcd.com + chain-id: 123 + network-id: 123 + currency: ETH + +subgraphs: + some-sg: https://www.some-sg.com + +deployers: + some-deployer: + network: some-network + address: 0xF14E09601A47552De6aBd3A0B165607FaFd2B5Ba + +orderbooks: + some-orderbook: + address: 0xc95A5f8eFe14d7a20BD2E5BAFEC4E71f8Ce0B9A6 + network: some-network + subgraph: some-sg +"#; + let dotrain = r#" +tokens: + token1: + network: some-network + address: 0xc2132d05d31c914a87c6611c10748aeb04b58e8f + decimals: 6 + label: T1 + symbol: T1 + token2: + network: some-network + address: 0x8f3cf7ad23cd3cadbd9735aff958023239c6a063 + decimals: 18 + label: T2 + symbol: T2 + +scenarios: + some-scenario: + network: some-network + deployer: some-deployer + scenarios: + child-scenario: + bindings: + key1: value1 + +orders: + some-order: + inputs: + - token: token1 + vault-id: 1 + outputs: + - token: token1 + vault-id: 1 + deployer: some-deployer + orderbook: some-orderbook + +deployments: + some-deployment: + scenario: some-scenario.child-scenario + order: some-order +--- +#calculate-io +_ _: 0 0; +#handle-io +:; +#handle-add-order +:;"#; + + let dotrain_path = "./test_dotrain_filter.rain"; + let settings_path = "./test_settings_filter.yml"; + std::fs::write(dotrain_path, dotrain).unwrap(); + std::fs::write(settings_path, setting).unwrap(); + + let filter = Filter { + dotrain_file: dotrain_path.into(), + settings_file: Some(settings_path.into()), + deployments: vec!["some-deployment".to_string()], + output: None, + stdout: true, + }; + + assert!(filter.execute().await.is_ok()); + + std::fs::remove_file(dotrain_path).unwrap(); + std::fs::remove_file(settings_path).unwrap(); + } + + #[tokio::test] + async fn test_execute_unhappy() { + let filter = Filter { + dotrain_file: "./bad-path/test.rain".into(), + settings_file: None, + deployments: vec!["some-deployment".to_string()], + output: None, + stdout: true, + }; + + assert!(filter.execute().await.is_err()); + } +} diff --git a/crates/cli/src/commands/order/mod.rs b/crates/cli/src/commands/order/mod.rs index 3e3e5aa05..52bce6652 100644 --- a/crates/cli/src/commands/order/mod.rs +++ b/crates/cli/src/commands/order/mod.rs @@ -2,6 +2,7 @@ mod add; mod calldata; mod compose; mod detail; +mod filter; mod list; mod orderbook_address; mod remove; @@ -13,6 +14,7 @@ use anyhow::Result; use calldata::AddOrderCalldata; use clap::Parser; use compose::Compose; +use filter::Filter; use detail::CliOrderDetailArgs; use list::CliOrderListArgs; @@ -46,6 +48,9 @@ pub enum Order { alias = "ob-addr" )] OrderbookAddress(OrderbookAddress), + + #[command()] + Filter(Filter), } impl Execute for Order { @@ -58,6 +63,7 @@ impl Execute for Order { Order::Compose(compose) => compose.execute().await, Order::Calldata(calldata) => calldata.execute().await, Order::OrderbookAddress(orderbook_address) => 
orderbook_address.execute().await, + Order::Filter(filter) => filter.execute().await, } } } diff --git a/crates/cli/src/commands/order/orderbook_address.rs b/crates/cli/src/commands/order/orderbook_address.rs index c19efa73c..5ef93352c 100644 --- a/crates/cli/src/commands/order/orderbook_address.rs +++ b/crates/cli/src/commands/order/orderbook_address.rs @@ -2,7 +2,6 @@ use crate::execute::Execute; use crate::output::{output, SupportedOutputEncoding}; use anyhow::{anyhow, Result}; use clap::Parser; -use rain_orderbook_app_settings::Config; use rain_orderbook_common::dotrain_order::DotrainOrder; use std::fs::read_to_string; use std::path::PathBuf; @@ -36,7 +35,7 @@ impl Execute for OrderbookAddress { None => None, }; let order = DotrainOrder::new(dotrain, settings).await?; - let order_config: Config = order.clone().config; + let order_config = order.config().clone(); let deployment_ref = order_config .deployments .get(&self.deployment) diff --git a/crates/common/src/dotrain_order/filter.rs b/crates/common/src/dotrain_order/filter.rs new file mode 100644 index 000000000..d884daba3 --- /dev/null +++ b/crates/common/src/dotrain_order/filter.rs @@ -0,0 +1,711 @@ +use crate::dotrain_order::{DotrainOrder, DotrainOrderError}; +pub use rain_metadata::types::authoring::v2::*; +use rain_orderbook_app_settings::config_source::ConfigSource; + +impl DotrainOrder { + /// Creates a new instance with a clean frontmatter that only includes the + /// specified deployments and their related fields + pub async fn new_with_frontmatter_filtered_by_deployment( + dotrain: String, + config: Option, + deployments: &[&str], + ) -> Result { + Self::new(dotrain, config) + .await? + .filter_by_deployment(deployments) + .await + } + + /// Generates a new instance with a frontmatter that only includes the + /// specified deployments and their related fields + pub async fn filter_by_deployment( + &self, + deployments: &[&str], + ) -> Result { + // new empty config to copy used fields into + let mut new_config_source = ConfigSource::default(); + let config_source = &self.config_source; + + for deployment in deployments { + // find and insert the specified deployment + let deployment_ref = self.config.deployments.get(*deployment).ok_or( + DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Deployment \"{}\" not found", + deployment + )), + )?; + new_config_source.deployments.insert( + deployment.to_string(), + config_source + .deployments + .get(*deployment) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Deployment \"{}\" not found", + deployment + )))? + .clone(), + ); + + // find and insert the deployment scenario + // make sure to get the root scenario key + let scenario_ref = &deployment_ref.scenario; + let scenario_key = scenario_ref.name.split('.').nth(0).unwrap(); + new_config_source.scenarios.insert( + scenario_key.to_string(), + config_source + .scenarios + .get(scenario_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Scenario \"{}\" not found", + scenario_key + )))? + .clone(), + ); + + // find and insert the deployment order + let (order_key, order) = self + .config + .orders + .iter() + .find(|(_, v)| *v == &deployment_ref.order) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Deployment \"{}\" order not found", + deployment + )))?; + new_config_source.orders.insert( + order_key.clone(), + config_source + .orders + .get(order_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" not found", + order_key + )))? 
+ .clone(), + ); + + // find and insert the deployment scenario deployer + let (deployer_key, deployer) = self + .config + .deployers + .iter() + .find(|(_, v)| *v == &scenario_ref.deployer) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Scenario \"{}\" deployer not found", + scenario_key + )))?; + new_config_source.deployers.insert( + deployer_key.clone(), + config_source + .deployers + .get(deployer_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Deployer \"{}\" not found", + deployer_key + )))? + .clone(), + ); + + // find and insert the deployment scenario deployer network + let (network_key, _) = self + .config + .networks + .iter() + .find(|(_, v)| *v == &deployer.network) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Scenario \"{}\" deployer network not found", + scenario_key + )))?; + new_config_source.networks.insert( + network_key.clone(), + config_source + .networks + .get(network_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Network \"{}\" not found", + network_key + )))? + .clone(), + ); + + // keep all network names to later on include charts if they happen to + // have the same network name as any of the names on this list + let mut all_network_keys = vec![]; + all_network_keys.push(network_key.as_str()); + + // find and insert the deployment order network + let (network_key, _) = self + .config + .networks + .iter() + .find(|(_, v)| *v == &order.network) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" network not found", + order_key + )))?; + new_config_source.networks.insert( + network_key.clone(), + config_source + .networks + .get(network_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Network \"{}\" not found", + network_key + )))? + .clone(), + ); + if !all_network_keys.contains(&network_key.as_str()) { + all_network_keys.push(network_key.as_str()); + } + + // find and insert the deployment order deployer if it is present + if let Some(deployer_ref) = &order.deployer { + let (deployer_key, deployer) = self + .config + .deployers + .iter() + .find(|(_, v)| *v == deployer_ref) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" deployer not found", + order_key + )))?; + new_config_source.deployers.insert( + deployer_key.clone(), + config_source + .deployers + .get(deployer_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Deployer \"{}\" not found", + order_key + )))? + .clone(), + ); + + // find and insert the deployment order deployer network + let (network_key, _) = self + .config + .networks + .iter() + .find(|(_, v)| *v == &deployer.network) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" deployer network not found", + order_key + )))?; + new_config_source.networks.insert( + network_key.clone(), + config_source + .networks + .get(network_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Network \"{}\" not found", + network_key + )))? 
+ .clone(), + ); + if !all_network_keys.contains(&network_key.as_str()) { + all_network_keys.push(network_key.as_str()); + } + } + + // find and insert the deployment order orderbook if it is present + if let Some(orderbook_ref) = &order.orderbook { + let (orderbook_key, orderbook) = self + .config + .orderbooks + .iter() + .find(|(_, v)| *v == orderbook_ref) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" orderbook not found", + order_key + )))?; + new_config_source.orderbooks.insert( + orderbook_key.clone(), + config_source + .orderbooks + .get(orderbook_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Orderbook \"{}\" not found", + orderbook_key + )))? + .clone(), + ); + + // find and insert the deployment order orderbook subgraph + let (sg_key, _) = self + .config + .subgraphs + .iter() + .find(|(_, v)| *v == &orderbook_ref.subgraph) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" orderbook subgraph not found", + order_key + )))?; + new_config_source.subgraphs.insert( + sg_key.clone(), + config_source + .subgraphs + .get(sg_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Subgraph \"{}\" not found", + sg_key + )))? + .clone(), + ); + + // find and insert the deployment order orderbook network + let (network_key, _) = self + .config + .networks + .iter() + .find(|(_, v)| *v == &orderbook.network) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Order \"{}\" orderbook network not found", + order_key + )))?; + new_config_source.networks.insert( + network_key.clone(), + config_source + .networks + .get(network_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Network \"{}\" not found", + network_key + )))? + .clone(), + ); + if !all_network_keys.contains(&network_key.as_str()) { + all_network_keys.push(network_key.as_str()); + } + } + + // find and insert the deployment order inputs + for io in &order.inputs { + let (token_key, token) = self + .config + .tokens + .iter() + .find(|(_, v)| *v == &io.token) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError( + format!("Order \"{}\" input token not found", order_key), + ))?; + new_config_source.tokens.insert( + token_key.clone(), + config_source + .tokens + .get(token_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Token \"{}\" not found", + token_key + )))? + .clone(), + ); + + // find and insert the deployment order input network + let (network_key, _) = self + .config + .networks + .iter() + .find(|(_, v)| *v == &token.network) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Input token \"{}\" network not found", + token_key + )))?; + new_config_source.networks.insert( + network_key.clone(), + config_source + .networks + .get(network_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Network \"{}\" not found", + network_key + )))? 
+ .clone(), + ); + if !all_network_keys.contains(&network_key.as_str()) { + all_network_keys.push(network_key.as_str()); + } + } + + // // find and insert the deployment order outputs + for io in &order.outputs { + let (token_key, token) = self + .config + .tokens + .iter() + .find(|(_, v)| *v == &io.token) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError( + format!("Order \"{}\" output token not found", order_key), + ))?; + new_config_source.tokens.insert( + token_key.clone(), + config_source + .tokens + .get(token_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Token \"{}\" not found", + token_key + )))? + .clone(), + ); + + // find and insert the deployment order output network + let (network_key, _) = self + .config + .networks + .iter() + .find(|(_, v)| *v == &token.network) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Output token \"{}\" network not found", + token_key + )))?; + new_config_source.networks.insert( + network_key.clone(), + config_source + .networks + .get(network_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Network \"{}\" not found", + network_key + )))? + .clone(), + ); + if !all_network_keys.contains(&network_key.as_str()) { + all_network_keys.push(network_key.as_str()); + } + } + + // find and insert charts that reference the scenario or if they have + // same scenario network name as any previously added network + for (chart_key, chart) in &self.config.charts { + if chart.scenario.name.split('.').nth(0).unwrap() == scenario_key + || all_network_keys.contains(&chart.scenario.deployer.network.name.as_str()) + { + new_config_source.charts.insert( + chart_key.clone(), + config_source + .charts + .get(chart_key) + .ok_or(DotrainOrderError::CleanUnusedFrontmatterError(format!( + "Chart \"{}\" not found", + chart_key + )))? 
+ .clone(), + ); + } + } + } + + // reserialize the new config and construct a new dotrain order with existing dotrain body + let mut new_dotrain = serde_yaml::to_string(&new_config_source) + .map_err(|e| DotrainOrderError::CleanUnusedFrontmatterError(e.to_string()))?; + new_dotrain.push_str("\n---\n"); + new_dotrain.push_str(self.dotrain.split("---").nth(1).unwrap()); + + Self::new(new_dotrain, None).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_filter_by_deployment_happy() { + let setting = r#" +networks: + some-network: + rpc: https://abcd.com + chain-id: 123 + network-id: 123 + currency: ETH + n2: + rpc: https://efgh.com + chain-id: 44 + network-id: 44 + currency: RRR + +subgraphs: + some-sg: https://www.some-sg.com + sg2: https://www.sg2.com + +deployers: + some-deployer: + network: some-network + address: 0xF14E09601A47552De6aBd3A0B165607FaFd2B5Ba + d2: + network: n2 + address: 0xF14E09601A47552De6aBd3A0B165607FaFd22134 + +orderbooks: + some-orderbook: + address: 0xc95A5f8eFe14d7a20BD2E5BAFEC4E71f8Ce0B9A6 + network: some-network + subgraph: some-sg + ob2: + address: 0xc95A5f8eFe14d7a20BD2E5BAFEC4E71f8Ce0B9A6 + network: n2 + subgraph: sg2 +"#; + let dotrain = r#" +tokens: + token1: + network: some-network + address: 0xc2132d05d31c914a87c6611c10748aeb04b58e8f + decimals: 6 + label: T1 + symbol: T1 + token2: + network: some-network + address: 0x8f3cf7ad23cd3cadbd9735aff958023239c6a063 + decimals: 18 + label: T2 + symbol: T2 + token3: + network: some-network + address: 0x8f3cf7ad23cd3cadbd9735aff958023239c6a063 + decimals: 77 + label: T3 + symbol: T3 + +scenarios: + some-scenario: + network: some-network + deployer: some-deployer + scenarios: + child-scenario: + bindings: + key1: value1 + s2: + network: n2 + deployer: d2 + scenarios: + child-scenario: + bindings: + key1: value1 + key2: value2 + +orders: + some-order: + inputs: + - token: token1 + vault-id: 1 + outputs: + - token: token1 + vault-id: 1 + deployer: some-deployer + orderbook: some-orderbook + +deployments: + some-deployment: + scenario: some-scenario.child-scenario + order: some-order + +charts: + some-chart: + scenario: some-scenario + metrics: + - label: A metric + description: A description + unit-prefix: $ + unit-suffix: USD + value: 0.1 + plots: + plot1: + title: "My plot" + subtitle: "My subtitle" + marks: + - type: dot + options: + x: "0.1" + y: "0.2" + stroke: "black" + other-chart: + scenario: s2 + metrics: + - label: B metric + description: B description + unit-prefix: $ + unit-suffix: EUR + value: 0.1 + another-chart: + scenario: s2.child-scenario + metrics: + - label: A metric + description: A description + unit-prefix: $ + unit-suffix: USD + value: 0.1 +--- +#calculate-io +_ _: 0 0; +#handle-io +:; +#handle-add-order +:;"#; + + let result = DotrainOrder::new_with_frontmatter_filtered_by_deployment( + dotrain.to_string(), + Some(setting.to_string()), + &["some-deployment"], + ) + .await + .unwrap(); + + let expected_dotrain = r#"networks: + some-network: + rpc: https://abcd.com/ + chain-id: 123 + network-id: 123 + currency: ETH +subgraphs: + some-sg: https://www.some-sg.com/ +orderbooks: + some-orderbook: + address: 0xc95a5f8efe14d7a20bd2e5bafec4e71f8ce0b9a6 + network: some-network + subgraph: some-sg +tokens: + token1: + network: some-network + address: 0xc2132d05d31c914a87c6611c10748aeb04b58e8f + decimals: 6 + label: T1 + symbol: T1 +deployers: + some-deployer: + address: 0xf14e09601a47552de6abd3a0b165607fafd2b5ba + network: some-network +orders: + some-order: + 
inputs: + - token: token1 + vault-id: '0x1' + outputs: + - token: token1 + vault-id: '0x1' + deployer: some-deployer + orderbook: some-orderbook +scenarios: + some-scenario: + deployer: some-deployer + scenarios: + child-scenario: + bindings: + key1: value1 +charts: + some-chart: + scenario: some-scenario + plots: + plot1: + title: My plot + subtitle: My subtitle + marks: + - type: dot + options: + x: '0.1' + y: '0.2' + stroke: black + metrics: + - label: A metric + description: A description + unit-prefix: $ + unit-suffix: USD + value: '0.1' +deployments: + some-deployment: + scenario: some-scenario.child-scenario + order: some-order + +--- + +#calculate-io +_ _: 0 0; +#handle-io +:; +#handle-add-order +:;"#; + + let expected = DotrainOrder::new(expected_dotrain.to_string(), None) + .await + .unwrap(); + + assert_eq!(result, expected); + } + + #[tokio::test] + async fn test_filter_by_deployment_unhappy() { + let setting = r#" +networks: + some-network: + rpc: https://abcd.com + chain-id: 123 + network-id: 123 + currency: ETH + +subgraphs: + some-sg: https://www.some-sg.com + +deployers: + some-deployer: + network: some-network + address: 0xF14E09601A47552De6aBd3A0B165607FaFd2B5Ba + +orderbooks: + some-orderbook: + address: 0xc95A5f8eFe14d7a20BD2E5BAFEC4E71f8Ce0B9A6 + network: some-network + subgraph: some-sg +"#; + let dotrain = r#" +tokens: + token1: + network: some-network + address: 0xc2132d05d31c914a87c6611c10748aeb04b58e8f + decimals: 6 + label: T1 + symbol: T1 + +scenarios: + some-scenario: + network: some-network + deployer: some-deployer + scenarios: + child-scenario: + bindings: + key1: value1 + +orders: + some-order: + inputs: + - token: token1 + vault-id: 1 + outputs: + - token: token1 + vault-id: 1 + deployer: some-deployer + +deployments: + some-deployment: + scenario: some-scenario.child-scenario + order: some-order +--- +#calculate-io +_ _: 0 0; +#handle-io +:; +#handle-add-order +:;"#; + + let result = DotrainOrder::new_with_frontmatter_filtered_by_deployment( + dotrain.to_string(), + Some(setting.to_string()), + &["some-other-deployment"], + ) + .await; + + matches!( + result, + Err(DotrainOrderError::CleanUnusedFrontmatterError(_)) + ); + } +} diff --git a/crates/common/src/dotrain_order.rs b/crates/common/src/dotrain_order/mod.rs similarity index 93% rename from crates/common/src/dotrain_order.rs rename to crates/common/src/dotrain_order/mod.rs index 66c6db225..04f4982b7 100644 --- a/crates/common/src/dotrain_order.rs +++ b/crates/common/src/dotrain_order/mod.rs @@ -15,10 +15,13 @@ use rain_orderbook_app_settings::{ use rain_orderbook_env::GH_COMMIT_SHA; use thiserror::Error; -#[derive(Clone)] +pub mod filter; + +#[derive(Debug, Clone, PartialEq)] pub struct DotrainOrder { - pub config: Config, - pub dotrain: String, + config: Config, + dotrain: String, + config_source: ConfigSource, } #[derive(Error, Debug)] @@ -53,6 +56,9 @@ pub enum DotrainOrderError { #[error(transparent)] ParserError(#[from] ParserError), + #[error("{0}")] + CleanUnusedFrontmatterError(String), + #[error("Raindex version mismatch: got {1}, should be {0}")] RaindexVersionMismatch(String, String), @@ -64,25 +70,44 @@ impl DotrainOrder { pub async fn new(dotrain: String, config: Option) -> Result { match config { Some(config) => { - let config_string = ConfigSource::try_from_string(config).await?; + let config_string = ConfigSource::try_from_string(config.clone()).await?; let frontmatter = RainDocument::get_front_matter(&dotrain).unwrap(); let mut frontmatter_config = 
ConfigSource::try_from_string(frontmatter.to_string()).await?; frontmatter_config.merge(config_string)?; Ok(Self { dotrain, + config_source: frontmatter_config.clone(), config: frontmatter_config.try_into()?, }) } None => { let frontmatter = RainDocument::get_front_matter(&dotrain).unwrap(); - let config_string = ConfigSource::try_from_string(frontmatter.to_string()).await?; - let config: Config = config_string.try_into()?; - Ok(Self { dotrain, config }) + let config_source = ConfigSource::try_from_string(frontmatter.to_string()).await?; + Ok(Self { + dotrain, + config_source: config_source.clone(), + config: config_source.try_into()?, + }) } } } + // get this instance's config + pub fn config(&self) -> &Config { + &self.config + } + + // get this instance's config source + pub fn config_source(&self) -> &ConfigSource { + &self.config_source + } + + // get this instance's dotrain string + pub fn dotrain(&self) -> &str { + &self.dotrain + } + pub async fn compose_scenario_to_rainlang( &self, scenario: String, diff --git a/crates/common/src/frontmatter.rs b/crates/common/src/frontmatter.rs index 2c009b8b7..d2a71807d 100644 --- a/crates/common/src/frontmatter.rs +++ b/crates/common/src/frontmatter.rs @@ -1,4 +1,5 @@ use dotrain::RainDocument; +pub use rain_metadata::types::authoring::v2::*; use rain_orderbook_app_settings::{config::ParseConfigSourceError, config_source::ConfigSource}; /// Parse dotrain frontmatter and merges it with top Config if given diff --git a/crates/integration_tests/src/lib.rs b/crates/integration_tests/src/lib.rs index 726b156c2..fec3363a2 100644 --- a/crates/integration_tests/src/lib.rs +++ b/crates/integration_tests/src/lib.rs @@ -99,7 +99,7 @@ amount price: get("amount") 52; let order = DotrainOrder::new(dotrain.clone(), None).await.unwrap(); - let deployment = order.config.deployments["polygon"].as_ref().clone(); + let deployment = order.config().deployments["polygon"].as_ref().clone(); let args = AddOrderArgs::new_from_deployment(dotrain, deployment) .await @@ -250,7 +250,7 @@ amount price: get("amount") 52; let order = DotrainOrder::new(dotrain.clone(), None).await.unwrap(); - let deployment = order.config.deployments["polygon"].as_ref().clone(); + let deployment = order.config().deployments["polygon"].as_ref().clone(); let args = AddOrderArgs::new_from_deployment(dotrain, deployment) .await diff --git a/crates/settings/src/chart.rs b/crates/settings/src/chart.rs index 722049012..8224cb2eb 100644 --- a/crates/settings/src/chart.rs +++ b/crates/settings/src/chart.rs @@ -19,10 +19,14 @@ pub struct Chart { #[serde(rename_all = "kebab-case")] pub struct Metric { pub label: String, + #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub unit_prefix: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub unit_suffix: Option, pub value: String, + #[serde(skip_serializing_if = "Option::is_none")] pub precision: Option, } diff --git a/crates/settings/src/config_source.rs b/crates/settings/src/config_source.rs index 0ce48662f..9c22fb4d3 100644 --- a/crates/settings/src/config_source.rs +++ b/crates/settings/src/config_source.rs @@ -9,32 +9,34 @@ use typeshare::typeshare; use url::Url; #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct ConfigSource { - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub 
using_networks_from: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub networks: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub subgraphs: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub orderbooks: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub tokens: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub deployers: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub orders: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub scenarios: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub charts: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub deployments: HashMap, - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub metaboards: HashMap, + #[serde(skip_serializing_if = "Option::is_none")] pub sentry: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub raindex_version: Option, } @@ -63,20 +65,23 @@ pub type TokenRef = String; pub type MetaboardRef = String; #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct NetworkConfigSource { pub rpc: Url, #[typeshare(typescript(type = "number"))] pub chain_id: u64, + #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, #[typeshare(typescript(type = "number"))] + #[serde(skip_serializing_if = "Option::is_none")] pub network_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub currency: Option, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct RemoteNetworksConfigSource { pub url: String, @@ -84,37 +89,45 @@ pub struct RemoteNetworksConfigSource { } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct OrderbookConfigSource { pub address: Address, + #[serde(skip_serializing_if = "Option::is_none")] pub network: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub subgraph: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct TokenConfigSource { pub network: NetworkRef, pub address: Address, + #[serde(skip_serializing_if = "Option::is_none")] pub decimals: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub symbol: Option, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct DeployerConfigSource { pub address: Address, + #[serde(skip_serializing_if = "Option::is_none")] pub network: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub 
struct DeploymentConfigSource { pub scenario: ScenarioRef, @@ -122,44 +135,54 @@ pub struct DeploymentConfigSource { } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct IOString { pub token: TokenRef, #[typeshare(typescript(type = "bigint"))] + #[serde(skip_serializing_if = "Option::is_none")] pub vault_id: Option, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct OrderConfigSource { pub inputs: Vec, pub outputs: Vec, + #[serde(skip_serializing_if = "Option::is_none")] pub deployer: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub orderbook: Option, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct ScenarioConfigSource { - #[serde(default)] + #[serde(default, skip_serializing_if = "HashMap::is_empty")] pub bindings: HashMap, #[typeshare(typescript(type = "number"))] + #[serde(skip_serializing_if = "Option::is_none")] pub runs: Option, #[typeshare(skip)] + #[serde(skip_serializing_if = "Option::is_none")] pub blocks: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub deployer: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub scenarios: Option>, } #[typeshare] -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct ChartConfigSource { + #[serde(skip_serializing_if = "Option::is_none")] pub scenario: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub plots: Option>, + #[serde(skip_serializing_if = "Option::is_none")] pub metrics: Option>, } diff --git a/crates/settings/src/plot_source.rs b/crates/settings/src/plot_source.rs index 1a01dfe82..18a0c7d5f 100644 --- a/crates/settings/src/plot_source.rs +++ b/crates/settings/src/plot_source.rs @@ -5,16 +5,26 @@ use typeshare::typeshare; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct Plot { + #[serde(skip_serializing_if = "Option::is_none")] pub title: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub subtitle: Option, pub marks: Vec, + #[serde(skip_serializing_if = "Option::is_none")] pub x: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub y: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub margin: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub margin_left: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub margin_right: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub margin_top: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub margin_bottom: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub inset: Option, } @@ -30,22 +40,34 @@ pub enum Mark { #[typeshare] #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct DotOptions { + #[serde(skip_serializing_if = "Option::is_none")] pub x: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub y: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub r: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub fill: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub stroke: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub transform: Option, } #[typeshare] 
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct LineOptions { + #[serde(skip_serializing_if = "Option::is_none")] pub x: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub y: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub r: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub fill: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub stroke: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub transform: Option, } @@ -53,10 +75,15 @@ pub struct LineOptions { #[typeshare] #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct RectYOptions { + #[serde(skip_serializing_if = "Option::is_none")] pub x0: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub x1: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub y0: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub y1: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub transform: Option, } @@ -65,9 +92,13 @@ pub struct RectYOptions { #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct AxisOptions { + #[serde(skip_serializing_if = "Option::is_none")] pub label: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub anchor: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub label_anchor: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub label_arrow: Option, } @@ -84,11 +115,17 @@ pub enum Transform { #[typeshare] #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct TransformOutputs { + #[serde(skip_serializing_if = "Option::is_none")] x: Option, + #[serde(skip_serializing_if = "Option::is_none")] y: Option, + #[serde(skip_serializing_if = "Option::is_none")] r: Option, + #[serde(skip_serializing_if = "Option::is_none")] z: Option, + #[serde(skip_serializing_if = "Option::is_none")] stroke: Option, + #[serde(skip_serializing_if = "Option::is_none")] fill: Option, } @@ -103,8 +140,11 @@ pub struct HexBinTransform { #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "kebab-case")] pub struct HexBinOptions { + #[serde(skip_serializing_if = "Option::is_none")] x: Option, + #[serde(skip_serializing_if = "Option::is_none")] y: Option, + #[serde(skip_serializing_if = "Option::is_none")] bin_width: Option, } @@ -118,7 +158,9 @@ pub struct BinXTransform { #[typeshare] #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct BinXOptions { + #[serde(skip_serializing_if = "Option::is_none")] x: Option, + #[serde(skip_serializing_if = "Option::is_none")] thresholds: Option, }
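
Note: below is a minimal usage sketch of the frontmatter-filtering API added in this diff, assuming the common crate is consumed as a library with tokio and anyhow available; the file names, deployment key, and runtime setup are illustrative only and not part of the change.

use rain_orderbook_common::dotrain_order::DotrainOrder;
use std::fs;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Read the .rain text and an optional settings yaml (paths are hypothetical).
    let dotrain = fs::read_to_string("strategy.rain")?;
    let settings = fs::read_to_string("settings.yml").ok();

    // Build an order whose frontmatter keeps only "some-deployment" and the
    // networks, deployers, orders, tokens, orderbooks and charts it references.
    let order = DotrainOrder::new_with_frontmatter_filtered_by_deployment(
        dotrain,
        settings,
        &["some-deployment"],
    )
    .await?;

    // The now-private fields are reached through the new accessors.
    println!("{}", order.dotrain());
    println!("deployments kept: {}", order.config().deployments.len());
    println!("networks kept: {}", order.config_source().networks.len());
    Ok(())
}

On the CLI side the equivalent would be something along the lines of: order filter -f strategy.rain -c settings.yml -e some-deployment --stdout (the exact binary invocation depends on how the CLI is installed).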