diff --git a/adapter/src/adapter.rs b/adapter/src/adapter.rs index 37bbd1066..6ee763b04 100644 --- a/adapter/src/adapter.rs +++ b/adapter/src/adapter.rs @@ -1,5 +1,6 @@ use crate::primitives::*; use async_trait::async_trait; +use primitives::{ChainId, ChainOf, Channel}; use std::{marker::PhantomData, sync::Arc}; use crate::{ @@ -93,8 +94,10 @@ where self.client.sign(state_root).map_err(Into::into) } - fn get_auth(&self, intended_for: ValidatorId) -> Result { - self.client.get_auth(intended_for).map_err(Into::into) + fn get_auth(&self, for_chain: ChainId, intended_for: ValidatorId) -> Result { + self.client + .get_auth(for_chain, intended_for) + .map_err(Into::into) } } @@ -134,11 +137,11 @@ where async fn get_deposit( &self, - channel: &Channel, + channel_context: &ChainOf, depositor_address: Address, ) -> Result { self.client - .get_deposit(channel, depositor_address) + .get_deposit(channel_context, depositor_address) .await .map_err(Into::into) } diff --git a/adapter/src/client.rs b/adapter/src/client.rs index 62ad54211..315c973c3 100644 --- a/adapter/src/client.rs +++ b/adapter/src/client.rs @@ -6,7 +6,7 @@ use crate::primitives::{Deposit, Session}; use async_trait::async_trait; -use primitives::{Address, Channel, ValidatorId}; +use primitives::{Address, ChainId, ChainOf, Channel, ValidatorId}; #[async_trait] /// Available methods for Locked clients. @@ -30,7 +30,7 @@ pub trait Locked: Sync + Send { async fn get_deposit( &self, - channel: &Channel, + channel_context: &ChainOf, depositor_address: Address, ) -> Result; @@ -48,6 +48,7 @@ pub trait Locked: Sync + Send { } /// Available methods for Unlocked clients. +/// /// Unlocked clients should also implement [`Locked`]. #[async_trait] pub trait Unlocked: Locked { @@ -55,7 +56,11 @@ pub trait Unlocked: Locked { fn sign(&self, state_root: &str) -> Result; // requires Unlocked - fn get_auth(&self, intended_for: ValidatorId) -> Result; + fn get_auth( + &self, + for_chain: ChainId, + intended_for: ValidatorId, + ) -> Result; } /// A client that can be `unlock()`ed diff --git a/adapter/src/dummy.rs b/adapter/src/dummy.rs index 121fb8df5..78af67134 100644 --- a/adapter/src/dummy.rs +++ b/adapter/src/dummy.rs @@ -8,11 +8,28 @@ use crate::{ use async_trait::async_trait; use dashmap::{mapref::entry::Entry, DashMap}; -use primitives::{Address, Channel, ChannelId, ToETHChecksum, ValidatorId}; +use once_cell::sync::Lazy; +use primitives::{ + Address, Chain, ChainId, ChainOf, Channel, ChannelId, ToETHChecksum, ValidatorId, +}; use std::{collections::HashMap, sync::Arc}; pub type Adapter = crate::Adapter; +/// The Dummy Chain to be used with this adapter +/// The Chain is not applicable to the adapter, however, it is required for +/// applications because of the `authentication` & [`Channel`] interactions. +pub static DUMMY_CHAIN: Lazy = Lazy::new(|| Chain { + chain_id: ChainId::new(1), + rpc: "http://dummy.com".parse().expect("Should parse ApiUrl"), + outpace: "0x0000000000000000000000000000000000000000" + .parse() + .unwrap(), + sweeper: "0x0000000000000000000000000000000000000000" + .parse() + .unwrap(), +}); + /// Dummy adapter implementation intended for testing. #[derive(Debug, Clone)] pub struct Dummy { @@ -86,6 +103,9 @@ impl Locked for Dummy { } /// Verify, based on the signature & state_root, that the signer is the same + /// + /// Splits the signature by `" "` (whitespace) and takes + /// the last part of it which contains the signer [`Address`]. 
fn verify( &self, signer: ValidatorId, @@ -102,8 +122,8 @@ impl Locked for Dummy { Ok(is_same) } - /// Creates a `Session` from a provided Token by calling the Contract. - /// Does **not** cache the (`Token`, `Session`) pair. + /// Finds the authorization token from the configured values + /// and creates a [`Session`] out of it using a [`ChainId`] of `1`. async fn session_from_token(&self, token: &str) -> Result { let identity = self .authorization_tokens @@ -114,6 +134,7 @@ impl Locked for Dummy { Some((address, _token)) => Ok(Session { uid: *address, era: 0, + chain: DUMMY_CHAIN.clone(), }), None => Err(Error::authentication(format!( "No identity found that matches authentication token: {}", @@ -124,11 +145,11 @@ impl Locked for Dummy { async fn get_deposit( &self, - channel: &Channel, + channel_context: &ChainOf, depositor_address: Address, ) -> Result { self.deposits - .get_next_deposit(channel.id(), depositor_address) + .get_next_deposit(channel_context.context.id(), depositor_address) .ok_or_else(|| { Error::adapter(format!( "No more mocked deposits found for depositor {:?}", @@ -151,7 +172,7 @@ impl Unlocked for Dummy { } // requires Unlocked - fn get_auth(&self, _intended_for: ValidatorId) -> Result { + fn get_auth(&self, _for_chain: ChainId, _intended_for: ValidatorId) -> Result { self.authorization_tokens .get(&self.identity.to_address()) .cloned() @@ -174,9 +195,12 @@ impl Unlockable for Dummy { #[cfg(test)] mod test { + use std::num::NonZeroU8; + use primitives::{ + config::TokenInfo, util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, IDS}, - BigNum, + BigNum, ChainOf, UnifiedNum, }; use super::*; @@ -184,6 +208,18 @@ mod test { #[tokio::test] async fn test_deposits_calls() { let channel = DUMMY_CAMPAIGN.channel; + + let channel_context = ChainOf { + context: channel, + token: TokenInfo { + min_token_units_for_deposit: 1_u64.into(), + min_validator_fee: 1_u64.into(), + precision: NonZeroU8::new(UnifiedNum::PRECISION).expect("Non zero u8"), + address: channel.token, + }, + chain: DUMMY_CHAIN.clone(), + }; + let dummy_client = Dummy::init(Options { dummy_identity: IDS["leader"], dummy_auth_tokens: Default::default(), @@ -193,7 +229,7 @@ mod test { // no mocked deposit calls should cause an Error { - let result = dummy_client.get_deposit(&channel, address).await; + let result = dummy_client.get_deposit(&channel_context, address).await; assert!(result.is_err()); } @@ -211,25 +247,25 @@ mod test { dummy_client.add_deposit_call(channel.id(), address, deposits[1].clone()); let first_call = dummy_client - .get_deposit(&channel, address) + .get_deposit(&channel_context, address) .await .expect("Should get first mocked deposit"); assert_eq!(&deposits[0], &first_call); // should not affect the Mocked deposit calls and should cause an error let different_address_call = dummy_client - .get_deposit(&channel, ADDRESSES["leader"]) + .get_deposit(&channel_context, ADDRESSES["leader"]) .await; assert!(different_address_call.is_err()); let second_call = dummy_client - .get_deposit(&channel, address) + .get_deposit(&channel_context, address) .await .expect("Should get second mocked deposit"); assert_eq!(&deposits[1], &second_call); // Third call should error, we've only mocked 2 calls! 
- let third_call = dummy_client.get_deposit(&channel, address).await; + let third_call = dummy_client.get_deposit(&channel_context, address).await; assert!(third_call.is_err()); } } diff --git a/adapter/src/error.rs b/adapter/src/error.rs index 215a6860a..f1ac09cdf 100644 --- a/adapter/src/error.rs +++ b/adapter/src/error.rs @@ -70,7 +70,7 @@ impl fmt::Display for Inner { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.source { // Writes: "Kind: Error message here" - Some(source) => write!(f, "{}: {}", self.kind, source.to_string()), + Some(source) => write!(f, "{}: {}", self.kind, source), // Writes: "Kind" None => write!(f, "{}", self.kind), } diff --git a/adapter/src/ethereum/client.rs b/adapter/src/ethereum/client.rs index 162a2686f..edb6dac48 100644 --- a/adapter/src/ethereum/client.rs +++ b/adapter/src/ethereum/client.rs @@ -11,7 +11,7 @@ use ethstore::{ ethkey::{verify_address, Message, Signature}, SafeAccount, }; -use primitives::{Address, BigNum, Channel, Config, ValidatorId}; +use primitives::{Address, BigNum, Chain, ChainId, ChainOf, Channel, Config, ValidatorId}; use super::{ channel::EthereumChannel, @@ -61,10 +61,21 @@ pub struct Options { pub struct Ethereum { address: ValidatorId, config: Config, - web3: Web3<Http>, pub(crate) state: S, } +pub(crate) trait ChainTransport { + fn init_web3(&self) -> web3::Result<Web3<Http>>; +} + +impl ChainTransport for Chain { + fn init_web3(&self) -> web3::Result<Web3<Http>> { + let transport = Http::new(self.rpc.as_str())?; + + Ok(Web3::new(transport)) + } +} + impl Ethereum { pub fn init(opts: Options, config: &Config) -> Result { let keystore_contents = @@ -83,14 +94,9 @@ impl Ethereum { .map_err(KeystoreError::AddressInvalid) }?; - let transport = - web3::transports::Http::new(&config.ethereum_network).map_err(Error::Web3)?; - let web3 = web3::Web3::new(transport); - Ok(Self { address, config: config.to_owned(), - web3, state: LockedWallet::KeyStore { keystore: keystore_json, password: opts.keystore_pwd.into(), @@ -106,6 +112,7 @@ impl Ethereum { /// **Note:** Signature should be `01` suffixed for Eth Sign for this call. pub async fn has_privileges( &self, + chain: &Chain, identity: Address, hash: [u8; 32], signature_with_mode: &[u8], @@ -115,9 +122,12 @@ // u32::MAX = 4294967295 let _no_access_value: u32 = 0xffffffff; - let identity_contract = - Contract::from_json(self.web3.eth(), H160(identity.to_bytes()), &IDENTITY_ABI) - .map_err(Error::ContractInitialization)?; + let identity_contract = Contract::from_json( + chain.init_web3()?.eth(), + H160(identity.to_bytes()), + &IDENTITY_ABI, + ) + .map_err(Error::ContractInitialization)?; // we receive `bytes4` from the contract let status: [u8; 4] = identity_contract @@ -167,7 +177,6 @@ impl Unlockable for Ethereum { Ok(Ethereum { address: self.address, config: self.config.clone(), - web3: self.web3.clone(), state: unlocked_wallet, }) } @@ -176,6 +185,7 @@ #[async_trait] impl Locked for Ethereum { type Error = Error; + fn whoami(&self) -> ValidatorId { self.address } @@ -203,6 +213,9 @@ impl Locked for Ethereum { } /// Creates a `Session` from a provided Token by calling the Contract. + /// + /// This method validates that the [`Payload`]'s [`Chain`] is whitelisted in the configuration. + /// /// Does **not** cache the (`Token`, `Session`) pair. 
async fn session_from_token(&self, token: &str) -> Result { let (verified_token, verified) = ewt::Token::verify(token).map_err(Error::VerifyMessage)?; @@ -214,12 +227,21 @@ impl Locked for Ethereum { }); } + // Check if Payload chain is whitelisted + let whitelisted_chain = self + .config + .find_chain(verified.payload.chain_id) + .ok_or(Error::ChainNotWhitelisted(verified.payload.chain_id))? + .chain + .clone(); + let sess = match &verified.payload.identity { Some(identity) => { // the Hash for has_privileges should **not** be an Ethereum Signed Message hash if self .has_privileges( + &whitelisted_chain, *identity, verified_token.message_hash, &verified_token.signature, @@ -229,6 +251,7 @@ impl Locked for Ethereum { Session { era: verified.payload.era, uid: identity.to_owned(), + chain: whitelisted_chain, } } else { return Err(Error::InsufficientAuthorizationPrivilege); @@ -237,6 +260,7 @@ impl Locked for Ethereum { None => Session { era: verified.payload.era, uid: verified.from, + chain: whitelisted_chain, }, }; @@ -245,29 +269,29 @@ impl Locked for Ethereum { async fn get_deposit( &self, - channel: &Channel, + channel_context: &ChainOf, depositor_address: Address, ) -> Result { - let token_info = self - .config - .token_address_whitelist - .get(&channel.token) - .ok_or(Error::TokenNotWhitelisted(channel.token))?; + let channel = channel_context.context; + let token = &channel_context.token; + let chain = &channel_context.chain; + + let web3 = chain.init_web3()?; let outpace_contract = Contract::from_json( - self.web3.eth(), - self.config.outpace_address.into(), + web3.eth(), + channel_context.chain.outpace.as_bytes().into(), &OUTPACE_ABI, ) .map_err(Error::ContractInitialization)?; let erc20_contract = - Contract::from_json(self.web3.eth(), channel.token.as_bytes().into(), &ERC20_ABI) + Contract::from_json(web3.eth(), channel.token.as_bytes().into(), &ERC20_ABI) .map_err(Error::ContractInitialization)?; let sweeper_contract = Contract::from_json( - self.web3.eth(), - self.config.sweeper_address.into(), + web3.eth(), + channel_context.chain.sweeper.as_bytes().into(), &SWEEPER_ABI, ) .map_err(Error::ContractInitialization)?; @@ -293,7 +317,7 @@ impl Locked for Ethereum { let counterfactual_address = get_counterfactual_address( sweeper_address, - channel, + &channel, outpace_address, depositor_address, ); @@ -314,7 +338,7 @@ impl Locked for Ethereum { .map_err(Error::BigNumParsing)?; // Count the create2 deposit only if it's > minimum token units configured - let deposit = if still_on_create2 > token_info.min_token_units_for_deposit { + let deposit = if still_on_create2 > token.min_token_units_for_deposit { Deposit { total: &still_on_create2 + &on_outpace, still_on_create2, @@ -346,13 +370,14 @@ impl Unlocked for Ethereum { Ok(format!("0x{}", signature)) } - fn get_auth(&self, intended_for: ValidatorId) -> Result { + fn get_auth(&self, for_chain: ChainId, intended_for: ValidatorId) -> Result { let era = Utc::now().timestamp_millis() as f64 / 60000.0; let payload = Payload { id: intended_for, era: era.floor() as i64, identity: None, address: self.whoami().to_address(), + chain_id: for_chain, }; let token = ewt::Token::sign(&self.state.wallet, &self.state.password, payload) @@ -362,69 +387,11 @@ impl Unlocked for Ethereum { } } -// #[cfg(test)] -// mod test { -// use primitives::{ -// adapter::adapter2::Adapter, -// test_util::{ADDRESS_1, DUMMY_CAMPAIGN}, -// }; - -// use super::*; -// #[tokio::test] -// async fn use_adapter() { -// // With Locked Client -// { -// let ethereum = Ethereum { 
-// web3: (), -// state: LockedWallet::KeyStore { -// keystore: (), -// password: (), -// }, -// }; -// let adapter = Adapter::new(ethereum); - -// // Should be able to call get_deposit before unlocking! -// adapter -// .get_deposit(&DUMMY_CAMPAIGN.channel, &ADDRESS_1) -// .await -// .expect("Should get deposit"); - -// let unlocked = adapter.unlock().expect("Should unlock"); - -// unlocked -// .get_auth((*ADDRESS_1).into()) -// .await -// .expect("Should get Auth"); -// } - -// // with Unlocked Client -// { -// let ethereum = Ethereum { -// web3: (), -// state: UnlockedWallet { -// wallet: (), -// password: (), -// }, -// }; - -// let adapter = Adapter::with_unlocked(ethereum); - -// adapter -// .get_deposit(&DUMMY_CAMPAIGN.channel, &ADDRESS_1) -// .await -// .expect("Should get deposit"); -// adapter -// .get_auth((*ADDRESS_1).into()) -// .await -// .expect("Should get Auth"); -// } -// } -// } - #[cfg(test)] mod test { use super::{ewt::ETH_SIGN_SUFFIX, Ethereum}; use crate::ethereum::{ + client::ChainTransport, ewt::{self, Payload}, get_counterfactual_address, test_util::*, @@ -439,19 +406,18 @@ mod test { use ethstore::ethkey::Message; use primitives::{ - config::{DEVELOPMENT_CONFIG, GANACHE_CONFIG}, + config::GANACHE_CONFIG, test_util::{ADDRESS_3, ADDRESS_4, ADDRESS_5, ADVERTISER, CREATOR, LEADER}, util::tests::prep_db::IDS, - BigNum, ToHex, ValidatorId, + BigNum, ChainOf, ToHex, ValidatorId, }; use web3::{ - contract::Options as ContractOptions, ethabi::Token, signing::keccak256, transports::Http, - types::H160, Web3, + contract::Options as ContractOptions, ethabi::Token, signing::keccak256, types::H160, }; #[test] fn should_init_and_unlock_ethereum_adapter() { - let _eth_adapter = Ethereum::init(KEYSTORE_IDENTITY.1.clone(), &DEVELOPMENT_CONFIG) + let _eth_adapter = Ethereum::init(KEYSTORE_IDENTITY.1.clone(), &GANACHE_CONFIG) .expect("Should init") .unlock() .expect("should unlock eth adapter"); @@ -460,7 +426,7 @@ mod test { #[test] fn should_get_whoami_sign_and_verify_messages() { // whoami - let eth_adapter = Ethereum::init(KEYSTORE_IDENTITY.1.clone(), &DEVELOPMENT_CONFIG) + let eth_adapter = Ethereum::init(KEYSTORE_IDENTITY.1.clone(), &GANACHE_CONFIG) .expect("Should init") .unlock() .expect("should unlock eth adapter"); @@ -511,6 +477,11 @@ mod test { .unlock() .expect("should unlock eth adapter"); + let ganache_chain = GANACHE_1337.clone(); + let web3 = ganache_chain + .init_web3() + .expect("Should init the Web3 client"); + let evil = *ADDRESS_5; let identify_as = *ADDRESS_3; @@ -522,10 +493,9 @@ mod test { assert_eq!(&hex::encode(msg_hash_actual), msg_hash); assert_eq!(&msg_hash_decoded, &msg_hash_actual); - let (identity_address, _contract) = - deploy_identity_contract(&user_adapter.web3, identify_as, &[user]) - .await - .expect("Should deploy identity"); + let (identity_address, _contract) = deploy_identity_contract(&web3, identify_as, &[user]) + .await + .expect("Should deploy identity"); // User should have privileges! 
{ @@ -550,7 +520,12 @@ mod test { assert_eq!(user_sig, signature_actual.to_hex_prefixed()); let has_privileges = user_adapter - .has_privileges(identity_address, msg_hash_actual, &signature_actual) + .has_privileges( + &ganache_chain, + identity_address, + msg_hash_actual, + &signature_actual, + ) .await .expect("Should get privileges"); @@ -585,7 +560,12 @@ mod test { assert_eq!(evil_sig, signature_actual.to_hex_prefixed()); let has_privileges = evil_adapter - .has_privileges(identity_address, msg_hash_actual, &signature_actual) + .has_privileges( + &ganache_chain, + identity_address, + msg_hash_actual, + &signature_actual, + ) .await .expect("Should get privileges"); @@ -606,10 +586,14 @@ mod test { "Ethereum address should be authenticated with keystore file as LEADER!" ); - let (identity_address, contract) = - deploy_identity_contract(&adapter.web3, *CREATOR, &[whoami]) - .await - .expect("Should deploy identity"); + let ganache_chain = GANACHE_1337.clone(); + let web3 = ganache_chain + .init_web3() + .expect("Should init the Web3 client"); + + let (identity_address, contract) = deploy_identity_contract(&web3, *CREATOR, &[whoami]) + .await + .expect("Should deploy identity"); let set_privileges: [u8; 32] = contract .query( @@ -638,6 +622,7 @@ mod test { era: era.floor() as i64, address: adapter.whoami().to_address(), identity: Some(identity_address), + chain_id: ganache_chain.chain_id, }; let auth_token = ewt::Token::sign(&adapter.state.wallet, &adapter.state.password, payload) @@ -645,6 +630,7 @@ mod test { let has_privileges = adapter .has_privileges( + &ganache_chain, identity_address, auth_token.message_hash, &auth_token.signature, @@ -662,8 +648,13 @@ mod test { .unlock() .expect("should unlock eth adapter"); + let ganache_chain = GANACHE_1337.clone(); + let web3 = ganache_chain + .init_web3() + .expect("Should init the Web3 client"); + let (identity_address, _contract) = - deploy_identity_contract(&adapter.web3, *CREATOR, &[*ADVERTISER]) + deploy_identity_contract(&web3, *CREATOR, &[*ADVERTISER]) .await .expect("Should deploy identity"); @@ -679,9 +670,12 @@ mod test { // the intended ValidatorId for whom the payload is. 
id: adapter.whoami(), era: era.floor() as i64, + // the identity as which we'd like to authenticate identity: Some(identity_address), // The singer address address: signer_adapter.whoami().to_address(), + // the chain we need to make the token for + chain_id: ganache_chain.chain_id, }; let token = ewt::Token::sign( &signer_adapter.state.wallet, &signer_adapter.state.password, payload, ) .expect("Should sign successfully the Payload"); // double check that we have privileges for _Who Am I_ assert!(adapter - .has_privileges(identity_address, token.message_hash, &token.signature) + .has_privileges( + &ganache_chain, + identity_address, + token.message_hash, + &token.signature + ) .await .expect("Ok")); @@ -703,7 +702,7 @@ mod test { #[tokio::test] async fn get_deposit_and_count_create2_when_min_tokens_received() { - let web3 = Web3::new(Http::new(GANACHE_URL).expect("failed to init transport")); + let web3 = GANACHE_WEB3.clone(); let leader_account = *LEADER; @@ -723,19 +722,50 @@ let spender = *CREATOR; + let (config, chain_context) = { + let mut init_chain = GANACHE_1337.clone(); + init_chain.outpace = outpace.0; + init_chain.sweeper = sweeper.0; + + let mut config = GANACHE_CONFIG.clone(); + + // Assert that the Ganache chain exists in the configuration + let mut config_chain = config + .chains + .values_mut() + .find(|chain_info| chain_info.chain.chain_id == init_chain.chain_id) + .expect("Should find Ganache chain in the configuration"); + + // override the chain to use the outpace & sweeper addresses that were just deployed + config_chain.chain = init_chain.clone(); + + // Assert that the token that was just deployed does not exist in the Config + assert!( + config_chain + .tokens + .values() + .find(|config_token_info| config_token_info.address == token.1) + .is_none(), + "Config should not have this token address, we've just deployed the contract." + ); + + let token_exists = config_chain.tokens.insert("TOKEN".into(), token.0.clone()); + + // Assert that the token name that was just deployed does not exist in the Config + assert!( + token_exists.is_none(), + "This token name should not pre-exist in Ganache config" + ); + + let chain_context = ChainOf::new(init_chain, token.0.clone()); + + (config, chain_context) + }; + let channel = get_test_channel(token_address); + let channel_context = chain_context.with(channel); - let mut config = DEVELOPMENT_CONFIG.clone(); - config.sweeper_address = sweeper.0.to_bytes(); - config.outpace_address = outpace.0.to_bytes(); // since we deploy a new contract, it's should be different from all the ones found in config. - assert!( - config - .token_address_whitelist - .insert(token_address, token.0) - .is_none(), - "Should not have previous value, we've just deployed the contract. 
- ); let eth_adapter = Ethereum::init(KEYSTORES[&LEADER].clone(), &config) .expect("should init ethereum adapter") .unlock() @@ -747,7 +777,7 @@ mod test { // No Regular nor Create2 deposits { let no_deposits = eth_adapter - .get_deposit(&channel, spender) + .get_deposit(&channel_context, spender) .await .expect("should get deposit"); @@ -760,43 +790,54 @@ mod test { ); } + // 10^18 = 1 TOKEN + let one_token = { + let deposit = "1000000000000000000".parse::().unwrap(); + // make sure 1 TOKEN is the minimum set in Config + let config_token = eth_adapter + .config + .find_chain_token(channel.token) + .expect("Channel token should be present in Config") + .token; + + assert!( + deposit >= config_token.min_token_units_for_deposit, + "The minimum deposit should be >= the configured token minimum token units" + ); + + deposit + }; + // Regular deposit in Outpace without Create2 { - mock_set_balance( - &token.2, - *LEADER.as_bytes(), - *spender.as_bytes(), - &BigNum::from(10_000), - ) - .await - .expect("Failed to set balance"); + assert!(token.1 == channel.token); + mock_set_balance(&token.2, LEADER.to_bytes(), spender.to_bytes(), &one_token) + .await + .expect("Failed to set balance"); - outpace_deposit( - &outpace.1, - &channel, - *spender.as_bytes(), - &BigNum::from(10_000), - ) - .await - .expect("Should deposit funds"); + outpace_deposit(&outpace.1, &channel, spender.to_bytes(), &one_token) + .await + .expect("Should deposit funds"); let regular_deposit = eth_adapter - .get_deposit(&channel, spender) + .get_deposit(&channel_context, spender) .await .expect("should get deposit"); assert_eq!( Deposit { - total: BigNum::from(10_000), + total: one_token.clone(), still_on_create2: BigNum::from(0), }, regular_deposit ); } - // Deposit with less than minimum token units + // Create2 deposit with less than minimum token units + // 1 TOKEN = 1 * 10^18 + // 999 * 10^18 < 1 TOKEN { - // Set balance < minimal token units, i.e. `1_000` + // Set balance < minimal token units, i.e. 
1 TOKEN mock_set_balance( &token.2, leader_account.to_bytes(), *spender.as_bytes(), &BigNum::from(999), ) .await .expect("Failed to set balance"); let deposit_with_create2 = eth_adapter - .get_deposit(&channel, spender) + .get_deposit(&channel_context, spender) .await .expect("should get deposit"); assert_eq!( Deposit { - total: BigNum::from(10_000), + total: one_token.clone(), // tokens are **less** than the minimum tokens required for deposits to count still_on_create2: BigNum::from(0), }, @@ -834,13 +875,13 @@ .expect("Failed to set balance"); let deposit_with_create2 = eth_adapter - .get_deposit(&channel, spender) + .get_deposit(&channel_context, spender) .await .expect("should get deposit"); assert_eq!( Deposit { - total: BigNum::from(11_999), + total: &one_token + BigNum::from(1_999), // tokens are more than the minimum tokens required for deposits to count still_on_create2: BigNum::from(1_999), }, @@ -860,13 +901,13 @@ .expect("Should sweep the Spender account"); let swept_deposit = eth_adapter - .get_deposit(&channel, spender) + .get_deposit(&channel_context, spender) .await .expect("should get deposit"); assert_eq!( Deposit { - total: BigNum::from(11_999), + total: &one_token + BigNum::from(1_999), // we've just swept the account, so create2 should be empty still_on_create2: BigNum::from(0), }, diff --git a/adapter/src/ethereum/error.rs b/adapter/src/ethereum/error.rs index fa8b6ec40..9553f13f0 100644 --- a/adapter/src/ethereum/error.rs +++ b/adapter/src/ethereum/error.rs @@ -1,6 +1,6 @@ use crate::Error as AdapterError; use primitives::{ - address::Error as AddressError, big_num::ParseBigIntError, Address, ChannelId, ValidatorId, + address::Error as AddressError, big_num::ParseBigIntError, ChainId, ChannelId, ValidatorId, }; use thiserror::Error; @@ -14,7 +14,7 @@ impl From<Error> for AdapterError { err @ Error::Web3(..) => AdapterError::adapter(err), err @ Error::InvalidChannelId { .. } => AdapterError::adapter(err), err @ Error::ChannelInactive(..) => AdapterError::adapter(err), - err @ Error::TokenNotWhitelisted(..) => AdapterError::adapter(err), + err @ Error::ChainNotWhitelisted(..) => AdapterError::adapter(err), err @ Error::InvalidDepositAsset(..) => AdapterError::adapter(err), err @ Error::BigNumParsing(..) => AdapterError::adapter(err), err @ Error::SignMessage(..) => AdapterError::adapter(err), @@ -62,8 +62,8 @@ pub enum Error { /// Error occurred during verification of Signature and/or StateRoot and/or Address #[error("Verifying address: {0}")] VerifyAddress(#[from] VerifyError), - #[error("Token not whitelisted: {0}")] - TokenNotWhitelisted(Address), + #[error("The intended {0:?} in the authentication token is not whitelisted")] + ChainNotWhitelisted(ChainId), #[error("Deposit asset {0} is invalid")] InvalidDepositAsset(#[from] AddressError), #[error("Parsing BigNum: {0}")] diff --git a/adapter/src/ethereum/ewt.rs b/adapter/src/ethereum/ewt.rs index 3b58d78f8..71e15f16f 100644 --- a/adapter/src/ethereum/ewt.rs +++ b/adapter/src/ethereum/ewt.rs @@ -5,7 +5,7 @@ use ethstore::{ SafeAccount, }; use once_cell::sync::Lazy; -use primitives::{Address, ValidatorId}; +use primitives::{Address, ChainId, ValidatorId}; use serde::{Deserialize, Serialize}; use web3::signing::keccak256; @@ -52,6 +52,7 @@ pub struct Payload { pub address: Address, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<Address>
, + pub chain_id: ChainId, } impl Payload { @@ -221,7 +222,7 @@ mod test { use primitives::{ config::GANACHE_CONFIG, test_util::{CREATOR, LEADER}, - ValidatorId, + ChainId, ValidatorId, }; use super::*; @@ -239,11 +240,13 @@ mod test { era: 100_000, address: eth_adapter.whoami().to_address(), identity: None, + // Eth + chain_id: ChainId::new(1), }; let wallet = eth_adapter.state.wallet.clone(); let token = Token::sign(&wallet, ð_adapter.state.password, payload) .expect("failed to generate ewt signature"); - let expected = "eyJ0eXAiOiJKV1QiLCJhbGciOiJFVEgifQ.eyJpZCI6IjB4ODA2OTA3NTE5NjlCMjM0Njk3ZTkwNTllMDRlZDcyMTk1YzM1MDdmYSIsImVyYSI6MTAwMDAwLCJhZGRyZXNzIjoiMHhhQ0JhREEyZDU4MzBkMTg3NWFlM0QyZGUyMDdBMTM2M0IzMTZEZjJGIn0.HVZ3qD2pdY_dqgNgJZTB7vhkpKBmMDzQ1tigee1aSd0ugnA_4D12nilJtpfS0KcG7soAMRqwCXw0-1hUqDqUrxsB"; + let expected = "eyJ0eXAiOiJKV1QiLCJhbGciOiJFVEgifQ.eyJpZCI6IjB4ODA2OTA3NTE5NjlCMjM0Njk3ZTkwNTllMDRlZDcyMTk1YzM1MDdmYSIsImVyYSI6MTAwMDAwLCJhZGRyZXNzIjoiMHhhQ0JhREEyZDU4MzBkMTg3NWFlM0QyZGUyMDdBMTM2M0IzMTZEZjJGIiwiY2hhaW5faWQiOjF9.GxF4XDXMx-rRty5zQ7-0nx2VlX51R_uEs_7OfA5ezDcyryUS06IWqVgGIfu4chhRJFP7woZ1YJpARNbCE01nWxwB"; assert_eq!(token.as_str(), expected, "generated wrong ewt signature"); let expected_verification_response = VerifyPayload { @@ -253,6 +256,8 @@ mod test { era: 100_000, address: *CREATOR, identity: None, + // Eth + chain_id: ChainId::new(1), }, }; diff --git a/adapter/src/ethereum/test_util.rs b/adapter/src/ethereum/test_util.rs index 4acce46d5..0e4401c2d 100644 --- a/adapter/src/ethereum/test_util.rs +++ b/adapter/src/ethereum/test_util.rs @@ -10,12 +10,16 @@ use web3::{ use primitives::{ channel::{Channel, Nonce}, - config::TokenInfo, + config::{ChainInfo, TokenInfo, GANACHE_CONFIG}, test_util::{ADVERTISER, CREATOR, FOLLOWER, GUARDIAN, GUARDIAN_2, LEADER, PUBLISHER}, - Address, BigNum, ValidatorId, + Address, BigNum, Chain, ValidatorId, }; -use super::{channel::EthereumChannel, client::Options, IDENTITY_ABI, OUTPACE_ABI, SWEEPER_ABI}; +use super::{ + channel::EthereumChannel, + client::{ChainTransport, Options}, + IDENTITY_ABI, OUTPACE_ABI, SWEEPER_ABI, +}; // See `adex-eth-protocol` `contracts/mocks/Token.sol` /// Mocked Token ABI @@ -87,6 +91,38 @@ pub static KEYSTORES: Lazy> = Lazy::new(|| { /// Local `ganache` is running at: pub const GANACHE_URL: &str = "http://localhost:8545"; +// /// [`Chain`] of the locally running `ganache-cli` chain with id #1 +// pub static GANACHE_1: Lazy = Lazy::new(|| GANACHE_INFO_1.chain.clone()); + +// /// [`ChainInfo`] of the locally running `ganache-cli` chain with id #1 +// pub static GANACHE_INFO_1: Lazy = Lazy::new(|| { +// GANACHE_CONFIG +// .chains +// .get("Ganache #1") +// .expect("Ganache Local chain 1 not found") +// .clone() +// }); + +/// [`Chain`] of the locally running `ganache-cli` chain with id #1337 +pub static GANACHE_1337: Lazy = Lazy::new(|| GANACHE_INFO_1337.chain.clone()); + +/// [`ChainInfo`] of the locally running `ganache-cli` chain with id #1337 +pub static GANACHE_INFO_1337: Lazy = Lazy::new(|| { + GANACHE_CONFIG + .chains + .get("Ganache #1337") + .expect("Ganache Local chain 1337 not found") + .clone() +}); + +/// Initialized Ganache [`Web3`] instance using a Http transport for usage in tests. +/// It uses the [`GANACHE_1337`] to initialize the client. +pub static GANACHE_WEB3: Lazy> = Lazy::new(|| { + GANACHE_1337 + .init_web3() + .expect("Should init the Web3 client") +}); + /// This helper function generates the correct path to the keystore file from this file. 
/// /// The `file_name` located at `adapter/test/resources` @@ -255,20 +291,21 @@ pub async fn deploy_token_contract( .expect("Invalid ABI of Mock Token contract") .confirmations(0) .options(ContractOptions::with(|opt| { - opt.gas_price = Some(1.into()); - opt.gas = Some(6_721_975.into()); + opt.gas_price = Some(1_i32.into()); + opt.gas = Some(6_721_975_i32.into()); })) .execute(*MOCK_TOKEN_BYTECODE, (), H160(LEADER.to_bytes())) .await?; + let token_address = Address::from(token_contract.address().to_fixed_bytes()); + let token_info = TokenInfo { min_token_units_for_deposit: BigNum::from(min_token_units), precision: NonZeroU8::new(18).expect("should create NonZeroU8"), - // 0.000_1 - min_validator_fee: BigNum::from(100_000_000_000_000), + // 0.000_001 + min_validator_fee: BigNum::from(1_000_000_000_000), + address: token_address, }; - let token_address = Address::from(token_contract.address().to_fixed_bytes()); - Ok((token_info, token_address, token_contract)) } diff --git a/adapter/src/lib.rs b/adapter/src/lib.rs index 1eb34398c..68466dbde 100644 --- a/adapter/src/lib.rs +++ b/adapter/src/lib.rs @@ -13,7 +13,10 @@ pub use { pub mod primitives { use serde::{Deserialize, Serialize}; - pub use ::primitives::{Address, BigNum, Channel, ValidatorId}; + pub use ::primitives::{ + config::{ChainInfo, Config, TokenInfo}, + Address, BigNum, Chain, ChainId, Channel, ValidatorId, + }; use crate::ethereum::WalletState; @@ -43,7 +46,10 @@ pub mod primitives { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Session { pub era: i64, + /// Authenticated as [`Address`]. pub uid: Address, + /// Authenticated for [`Chain`]. + pub chain: Chain, } } diff --git a/adapter/tests/dummy.rs b/adapter/tests/dummy.rs index 9ef965bc6..9e1543490 100644 --- a/adapter/tests/dummy.rs +++ b/adapter/tests/dummy.rs @@ -1,3 +1,5 @@ +use std::num::NonZeroU8; + use adapter::{ prelude::*, primitives::{Deposit, Session}, @@ -6,8 +8,9 @@ use adapter::{ use async_trait::async_trait; use primitives::{ + config::TokenInfo, test_util::{ADDRESS_1, DUMMY_CAMPAIGN}, - Address, BigNum, Channel, ValidatorId, + Address, BigNum, Chain, ChainId, ChainOf, Channel, ValidatorId, }; #[derive(Debug, Clone)] @@ -41,7 +44,7 @@ impl Locked for Dummy { async fn get_deposit( &self, - _channel: &Channel, + _channel_context: &ChainOf, _depositor_address: Address, ) -> Result { Ok(Deposit { @@ -59,7 +62,7 @@ impl Unlocked for Dummy { } // requires Unlocked - fn get_auth(&self, intended_for: ValidatorId) -> Result { + fn get_auth(&self, _for_chain: ChainId, intended_for: ValidatorId) -> Result { Ok(intended_for.to_string()) } } @@ -76,13 +79,36 @@ impl Unlockable for Dummy { async fn main() { let dummy = Dummy { _whoami: () }; + // A dummy Channel Context, with dummy Chain & Token + let channel_context = ChainOf { + context: DUMMY_CAMPAIGN.channel, + token: TokenInfo { + min_token_units_for_deposit: 1_u64.into(), + min_validator_fee: 1_u64.into(), + precision: NonZeroU8::new(18).unwrap(), + address: "0x6B83e7D6B72c098d48968441e0d05658dc17Adb9" + .parse() + .unwrap(), + }, + chain: Chain { + chain_id: ChainId::new(1), + rpc: "http://dummy.com".parse().unwrap(), + outpace: "0x0000000000000000000000000000000000000000" + .parse() + .unwrap(), + sweeper: "0x0000000000000000000000000000000000000000" + .parse() + .unwrap(), + }, + }; + // With new Locked Adapter { let locked_adapter = Adapter::new(dummy.clone()); // Should be able to call get_deposit before unlocking! 
locked_adapter - .get_deposit(&DUMMY_CAMPAIGN.channel, *ADDRESS_1) + .get_deposit(&channel_context, *ADDRESS_1) .await .expect("Should get deposit"); @@ -96,7 +122,7 @@ async fn main() { // Should be able to call get_deposit after unlocking! unlocked_adapter - .get_deposit(&DUMMY_CAMPAIGN.channel, *ADDRESS_1) + .get_deposit(&channel_context, *ADDRESS_1) .await .expect("Should get deposit"); } @@ -107,7 +133,7 @@ async fn main() { // Should be able to call `get_deposit()` on unlocked adapter unlocked_adapter - .get_deposit(&DUMMY_CAMPAIGN.channel, *ADDRESS_1) + .get_deposit(&channel_context, *ADDRESS_1) .await .expect("Should get deposit"); diff --git a/docs/config/dev.toml b/docs/config/dev.toml index 8e56419d3..40a0cbbe3 100644 --- a/docs/config/dev.toml +++ b/docs/config/dev.toml @@ -6,11 +6,6 @@ campaigns_find_limit = 200 spendable_find_limit = 200 wait_time = 500 -# V4 Deprecated -aggr_throttle = 0 - -events_find_limit = 100 - msgs_find_limit = 10 analytics_find_limit_v5 = 5000 analytics_maxtime_v5 = 20000 @@ -27,39 +22,65 @@ channel_tick_timeout = 5000 ip_rate_limit = { type = 'ip', timeframe = 20000 } sid_rate_limit = { type = 'sid', timeframe = 20000 } -# TODO: Replace with real contract address -outpace_address = '0x333420fc6a897356e69b62417cd17ff012177d2b' -# TODO: Replace with real contract address -sweeper_address = '0x333420fc6a897356e69b62417cd17ff012177d2b' - -ethereum_network = 'http://localhost:8545' - creators_whitelist = [] validators_whitelist = [] admins = ['0xce07CbB7e054514D590a0262C93070D838bFBA2e'] -[[token_address_whitelist]] -# DAI -address = '0x73967c6a0904aa032c103b4104747e88c566b1a2' -# 1 * 10^-10 = 0.0_000_000_001 -min_token_units_for_deposit = '100000000' -min_validator_fee = '100000000' -precision = 18 - -[[token_address_whitelist]] -# USDT -address = '0x509ee0d083ddf8ac028f2a56731412edd63223b9' -# 1.000_000 -min_token_units_for_deposit = '1000000' -# 0.001 -min_validator_fee = '1000' -precision = 6 - -[[token_address_whitelist]] -# USDC -address = '0x44dcfcead37be45206af6079648988b29284b2c6' -# 1.000_000 -min_token_units_for_deposit = '100000000' -# 0.001 -min_validator_fee = '1000' -precision = 6 +# Ethereum mainnet tokens +[chain."Ethereum Mainnet"] +# Ethereum Mainnet for testing +chain_id = 1 +rpc = "http://localhost:8545" +# Start TODO: Replace with Both addresss with the real contracts addresses +outpace = '0x333420fc6a897356e69b62417cd17ff012177d2b' +sweeper = '0x333420fc6a897356e69b62417cd17ff012177d2b' +# End TODO + + [chain."Ethereum Mainnet".token.DAI] + address = '0x6B175474E89094C44Da98b954EedeAC495271d0F' # checked + precision = 18 + # 1 * 10^-10 = 0.0_000_000_001 + min_token_units_for_deposit = '100000000' + min_validator_fee = '100000000' + + [chain."Ethereum Mainnet".token.USDT] + address = '0xdAC17F958D2ee523a2206206994597C13D831ec7' # checked + precision = 6 + # 1.000_000 + min_token_units_for_deposit = '1000000' + # 0.001 + min_validator_fee = '1000' + + [chain."Ethereum Mainnet".token.USDC] + address = '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48' # checked + precision = 6 + # 1.000_000 + min_token_units_for_deposit = '1000000' + # 0.001 + min_validator_fee = '1000' + + +[chain."Ethereum Ropsten"] +chain_id = 3 +rpc = "http://localhost:8546" +# Start TODO: Replace with Both addresss with the real contracts addresses +outpace = '0x333420fc6a897356e69b62417cd17ff012177d2b' +sweeper = '0x333420fc6a897356e69b62417cd17ff012177d2b' +# End TODO + + [chain."Ethereum Ropsten".token.DAI] + address = '0x65600c50Ea42e225368Ded6c3789a539284aA62C' # 
checked + precision = 18 + # 1 * 10^-10 = 0.0_000_000_001 + min_token_units_for_deposit = '100000000' + min_validator_fee = '100000000' + + # [chain."Ethereum Ropsten".token.USDT] + # address = '0xaF5DcEBba2f8bEc8729117336b2FE8B4E0D99b0B' + # [chain."Ethereum Ropsten".token.USDC] + # address = '0x8b544241B8115B5Da36d6fa644262e7FE60fE67F' + +# [chain."Binance Smart Chain Mainnet"] +# chain_id = 56 +# rpc = "http://localhost:8545" + diff --git a/docs/config/ganache.toml b/docs/config/ganache.toml index 2236b25e1..8190d1f3b 100644 --- a/docs/config/ganache.toml +++ b/docs/config/ganache.toml @@ -29,24 +29,46 @@ channel_tick_timeout = 10000 ip_rate_limit = { type = 'ip', timeframe = 1200000 } sid_rate_limit = { type = 'sid', timeframe = 0 } -# Ganache Snapshot address -outpace_address = '0xabc27d46a458e2e49dabfef45ca74dedbac3dd06' -# Ganache Snapshot address -sweeper_address = '0x7dd57c0324284102a757153e18f2cb1acdb7d2bd' - -ethereum_network = 'http://localhost:8545' - creators_whitelist = [] validators_whitelist = [] admins = ['0x80690751969B234697e9059e04ed72195c3507fa'] -[[token_address_whitelist]] -# Mocked TOKEN -address = '0x2bcaf6968aec8a3b5126fbfab5fd419da6e8ad8e' -# 1 * 10^18 = 1.0000 TOKEN -min_token_units_for_deposit = '1000000000000000000' -# multiplier = 10^13 - 10^18 (token precision) = 10^-5 -# min_validator_fee = 1 * 10^-5 = 0.000_01 -min_validator_fee = '10000000000000' -precision = 18 +# Ethereum mainnet tokens +# [chain."Ganache #1"] +# # Ethereum Mainnet for testing +# chain_id = 1 +# rpc = 'http://localhost:8546' +# # Ganache Snapshot address +# outpace = '0x0000000000000000000000000000000000000000' +# # Ganache Snapshot address +# sweeper = '0x0000000000000000000000000000000000000000' + +# [chain."Ganache #1".token."Mocked TOKEN"] +# address = '0x2bcaf6968aec8a3b5126fbfab5fd419da6e8ad8e' # checked +# precision = 18 +# # Minimum token units for the Create2 deposits to count +# # 1 * 10^18 = 1.0000 TOKEN +# min_token_units_for_deposit = '1000000000000000000' +# # multiplier = 10^12 - 10^18 (token precision) = 10^-6 +# # min_validator_fee = 1 * 10^-6 = 0.000_001 +# min_validator_fee = '1000000000000' + +[chain."Ganache #1337"] +# Ganache default chain id for testing +chain_id = 1337 +rpc = 'http://localhost:8545' +# Ganache Snapshot address +outpace = '0xAbc27d46a458E2e49DaBfEf45ca74dEDBAc3DD06' +# Ganache Snapshot address +sweeper = '0x7dD57C0324284102A757153E18F2Cb1ACdB7d2bD' + + [chain."Ganache #1337".token."Mocked TOKEN"] + address = '0x2bcaf6968aec8a3b5126fbfab5fd419da6e8ad8e' # checked + precision = 18 + # Minimum token units for the Create2 deposits to count + # 1 * 10^18 = 1.0000 TOKEN + min_token_units_for_deposit = '1000000000000000000' + # multiplier = 10^12 - 10^18 (token precision) = 10^-6 + # min_validator_fee = 1 * 10^-6 = 0.000_001 + min_validator_fee = '1000000000000' diff --git a/docs/config/prod.toml b/docs/config/prod.toml index 549b9ed52..e6b0eb111 100644 --- a/docs/config/prod.toml +++ b/docs/config/prod.toml @@ -7,11 +7,6 @@ spendable_find_limit = 512 wait_time = 40000 -# V4 Deprecated -aggr_throttle = 0 - -events_find_limit = 100 - msgs_find_limit = 10 analytics_find_limit_v5 = 5000 analytics_maxtime_v5 = 15000 diff --git a/primitives/src/big_num.rs b/primitives/src/big_num.rs index 422c83122..3634d068c 100644 --- a/primitives/src/big_num.rs +++ b/primitives/src/big_num.rs @@ -14,7 +14,19 @@ use crate::UnifiedNum; /// Re-export of the [`num::bigint::ParseBigIntError`] when using [`BigNum`] pub use num::bigint::ParseBigIntError; #[derive( - Serialize, 
Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, NumOps, One, Zero, Num, Default, + Serialize, + Deserialize, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + NumOps, + One, + Zero, + Num, + Default, + Hash, )] pub struct BigNum( #[serde( @@ -168,6 +180,15 @@ impl Pow for BigNum { } } +impl Add<&BigNum> for BigNum { + type Output = BigNum; + + fn add(self, rhs: &BigNum) -> Self::Output { + let big_uint = &self.0 + &rhs.0; + BigNum(big_uint) + } +} + impl Add<&BigNum> for &BigNum { type Output = BigNum; @@ -177,6 +198,15 @@ impl Add<&BigNum> for &BigNum { } } +impl Add for &BigNum { + type Output = BigNum; + + fn add(self, rhs: BigNum) -> Self::Output { + let big_uint = &self.0 + &rhs.0; + BigNum(big_uint) + } +} + impl AddAssign<&BigNum> for BigNum { fn add_assign(&mut self, rhs: &BigNum) { self.0 += &rhs.0 diff --git a/primitives/src/campaign_validator.rs b/primitives/src/campaign_validator.rs index 5fad5e82a..56908d2bc 100644 --- a/primitives/src/campaign_validator.rs +++ b/primitives/src/campaign_validator.rs @@ -1,14 +1,16 @@ use crate::{ - campaign::Validators, - config::{Config, TokenInfo}, - Address, Campaign, UnifiedNum, ValidatorId, + campaign::Validators, config::Config, Address, Campaign, ChainOf, UnifiedNum, ValidatorId, }; use chrono::Utc; -use std::{cmp::PartialEq, collections::HashMap}; +use std::cmp::PartialEq; use thiserror::Error; pub trait Validator { - fn validate(&self, config: &Config, validator_identity: &ValidatorId) -> Result<(), Error>; + fn validate( + self, + config: &Config, + validator_identity: &ValidatorId, + ) -> Result, Error>; } #[derive(Debug, PartialEq, Eq, Clone, Copy)] @@ -42,7 +44,11 @@ impl From for Error { } impl Validator for Campaign { - fn validate(&self, config: &Config, validator_identity: &ValidatorId) -> Result<(), Error> { + fn validate( + self, + config: &Config, + validator_identity: &ValidatorId, + ) -> Result, Error> { // check if the channel validators include our adapter identity let whoami_validator = match self.find_validator(validator_identity) { Some(role) => role.into_inner(), @@ -57,19 +63,20 @@ impl Validator for Campaign { return Err(Validation::UnlistedValidator.into()); } - if !creator_listed(self, &config.creators_whitelist) { + if !creator_listed(&self, &config.creators_whitelist) { return Err(Validation::UnlistedCreator.into()); } - // Check if the token is listed in the Configuration - let token_info = config - .token_address_whitelist - .get(&self.channel.token) + // Check if Channel token is listed in the configuration token Chain ID & Address + let chain_context = config + .find_chain_token(self.channel.token) .ok_or(Validation::UnlistedAsset)?; // Check if the campaign budget is above the minimum deposit configured - if self.budget.to_precision(token_info.precision.get()) - < token_info.min_token_units_for_deposit + if self + .budget + .to_precision(chain_context.token.precision.get()) + < chain_context.token.min_token_units_for_deposit { return Err(Validation::MinimumDepositNotMet.into()); } @@ -77,8 +84,8 @@ impl Validator for Campaign { // Check if the validator fee is greater than the minimum configured fee if whoami_validator .fee - .to_precision(token_info.precision.get()) - < token_info.min_validator_fee + .to_precision(chain_context.token.precision.get()) + < chain_context.token.min_validator_fee { return Err(Validation::MinimumValidatorFeeNotMet.into()); } @@ -95,7 +102,7 @@ impl Validator for Campaign { return Err(Validation::FeeConstraintViolated.into()); } - Ok(()) + 
Ok(chain_context.with_campaign(self)) } } @@ -123,28 +130,18 @@ pub fn creator_listed(campaign: &Campaign, whitelist: &[Address]) -> bool { .any(|allowed| allowed.eq(&campaign.creator)) } -pub fn asset_listed(campaign: &Campaign, whitelist: &HashMap) -> bool { - // if the list is empty, return true, as we don't have a whitelist to restrict us to - // or if we have a list, check if it includes the `channel.deposit_asset` - whitelist.is_empty() - || whitelist - .keys() - .any(|allowed| allowed == &campaign.channel.token) -} - #[cfg(test)] mod test { use super::*; use crate::{ - config, + config::{self, GANACHE_CONFIG}, util::tests::prep_db::{ ADDRESSES, DUMMY_CAMPAIGN, DUMMY_VALIDATOR_FOLLOWER, DUMMY_VALIDATOR_LEADER, IDS, - TOKENS, }, BigNum, }; use chrono::{TimeZone, Utc}; - use std::{num::NonZeroU8, str::FromStr}; + use std::str::FromStr; #[test] fn are_validators_listed() { @@ -189,89 +186,93 @@ mod test { } #[test] - fn is_asset_listed() { + fn chain_and_token_whitelist_validation() { let campaign = DUMMY_CAMPAIGN.clone(); - let mut assets = HashMap::new(); - // empty hashmap - let is_listed = asset_listed(&campaign, &assets); - assert!(is_listed); + // no configured Chains & Tokens + { + let mut config = GANACHE_CONFIG.clone(); + config.chains.clear(); - // not listed + let result = campaign.clone().validate(&config, &campaign.channel.leader); - assets.insert( - TOKENS["USDC"], - TokenInfo { - min_token_units_for_deposit: BigNum::from(0), - min_validator_fee: BigNum::from(0), - precision: NonZeroU8::new(6).expect("should create NonZeroU8"), - }, - ); - let is_listed = asset_listed(&campaign, &assets); - assert!(!is_listed); + assert!(matches!( + result, + Err(Error::Validation(Validation::UnlistedAsset)) + )); + } - // listed - assets.insert( - TOKENS["DAI"], - TokenInfo { - min_token_units_for_deposit: BigNum::from(0), - min_validator_fee: BigNum::from(0), - precision: NonZeroU8::new(18).expect("should create NonZeroU8"), - }, - ); - let is_listed = asset_listed(&campaign, &assets); - assert!(is_listed); + { + let config = GANACHE_CONFIG.clone(); + + let _campaign_context = campaign + .clone() + .validate(&config, &campaign.channel.leader) + .expect( + "Default development config should contain the dummy campaign.channel.token", + ); + } } #[test] fn are_campaigns_validated() { - let config = config::DEVELOPMENT_CONFIG.clone(); + let config = config::GANACHE_CONFIG.clone(); // Validator not in campaign { let campaign = DUMMY_CAMPAIGN.clone(); - let is_validated = campaign.validate(&config, &IDS["tester"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::AdapterNotIncluded)) - )); + + let validation_error = campaign + .validate(&config, &IDS["tester"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::AdapterNotIncluded), + validation_error, + ); } // active.to has passed { let mut campaign = DUMMY_CAMPAIGN.clone(); campaign.active.to = Utc.ymd(2019, 1, 30).and_hms(0, 0, 0); - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::InvalidActiveTo)) - )); + + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::InvalidActiveTo), + validation_error, + ); } // all_validators not listed { let campaign = DUMMY_CAMPAIGN.clone(); - let mut config = config::DEVELOPMENT_CONFIG.clone(); + let mut config = config::GANACHE_CONFIG.clone(); 
config.validators_whitelist = vec![IDS["leader"], IDS["tester"]]; - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::UnlistedValidator)) - )); + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::UnlistedValidator), + validation_error, + ); } // creator not listed { let campaign = DUMMY_CAMPAIGN.clone(); - let mut config = config::DEVELOPMENT_CONFIG.clone(); + let mut config = config::GANACHE_CONFIG.clone(); config.creators_whitelist = vec![ADDRESSES["tester"]]; - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::UnlistedCreator)) - )); + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::UnlistedCreator), + validation_error, + ); } // token not listed @@ -281,11 +282,13 @@ mod test { .parse::
() .expect("Should parse"); - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::UnlistedAsset)) - )); + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::UnlistedAsset), + validation_error, + ); } // budget < min_deposit @@ -293,63 +296,161 @@ mod test { let mut campaign = DUMMY_CAMPAIGN.clone(); campaign.budget = UnifiedNum::from_u64(0); - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::MinimumDepositNotMet)) - )); + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::MinimumDepositNotMet), + validation_error, + ); } // validator_fee < min_fee { let campaign = DUMMY_CAMPAIGN.clone(); - let mut config = config::DEVELOPMENT_CONFIG.clone(); - - config.token_address_whitelist.insert( - TOKENS["DAI"], - TokenInfo { - min_token_units_for_deposit: BigNum::from(0), - min_validator_fee: BigNum::from_str("999999999999999999999999999999999999") - .expect("should get BigNum"), - precision: NonZeroU8::new(18).expect("should create NonZeroU8"), - }, + let mut config = config::GANACHE_CONFIG.clone(); + + let mut token_info = config + .chains + .values_mut() + .find_map(|chain_info| { + chain_info + .tokens + .values_mut() + .find(|token_info| token_info.address == campaign.channel.token) + }) + .expect("Should find Dummy campaign.channel.token"); + token_info.min_validator_fee = BigNum::from_str("999999999999999999999999999999999999") + .expect("Should parse BigNum"); + + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::MinimumValidatorFeeNotMet), + validation_error, ); - - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::MinimumValidatorFeeNotMet)) - )); } + let sum_fees = |validators: &Validators| -> UnifiedNum { + validators + .iter() + .map(|validator| validator.fee) + .sum::>() + .expect("Validators sum of fees should not overflow") + }; + // total_fee > budget + // budget = total_fee - 1 { let mut campaign = DUMMY_CAMPAIGN.clone(); - campaign.budget = UnifiedNum::from_u64(150); // both fees are 100, so this won't cover them - - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::FeeConstraintViolated)) - )); + let campaign_token = config + .find_chain_token(campaign.channel.token) + .unwrap() + .token; + + // makes the sum of all validator fees = 2 * min token units for deposit + campaign.validators = { + let new_validators = campaign + .validators + .iter() + .map(|validator| { + let mut new_validator = validator.clone(); + new_validator.fee = UnifiedNum::from_precision( + campaign_token.min_token_units_for_deposit.clone(), + campaign_token.precision.into(), + ) + .expect("Should not overflow"); + + new_validator + }) + .collect::>(); + + assert_eq!( + 2, + new_validators.len(), + "Dummy Campaign validators should always be 2 - a leader & a follower" + ); + + Validators::new((new_validators[0].clone(), new_validators[1].clone())) + }; + + campaign.budget = sum_fees(&campaign.validators) - UnifiedNum::from(1); + + let 
validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::FeeConstraintViolated), + validation_error, + ); } // total_fee = budget { let mut campaign = DUMMY_CAMPAIGN.clone(); - campaign.budget = UnifiedNum::from_u64(200); - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(matches!( - is_validated, - Err(Error::Validation(Validation::FeeConstraintViolated)) - )); + let campaign_token = config + .find_chain_token(campaign.channel.token) + .unwrap() + .token; + + // makes the sum of all validator fees = 2 * min token units for deposit + campaign.validators = { + let new_validators = campaign + .validators + .iter() + .map(|validator| { + let mut new_validator = validator.clone(); + new_validator.fee = UnifiedNum::from_precision( + campaign_token.min_token_units_for_deposit.clone(), + campaign_token.precision.into(), + ) + .expect("Should not overflow"); + + new_validator + }) + .collect::>(); + + assert_eq!( + 2, + new_validators.len(), + "Dummy Campaign validators should always be 2 - a leader & a follower" + ); + + Validators::new((new_validators[0].clone(), new_validators[1].clone())) + }; + + campaign.budget = sum_fees(&campaign.validators); + + let validation_error = campaign + .validate(&config, &IDS["leader"]) + .expect_err("Should trigger validation error"); + assert_eq!( + Error::Validation(Validation::FeeConstraintViolated), + validation_error, + ); } + // should validate { let campaign = DUMMY_CAMPAIGN.clone(); - let is_validated = campaign.validate(&config, &IDS["leader"]); - assert!(is_validated.is_ok()); + let _campaign_context = campaign + .validate(&config, &IDS["leader"]) + .expect("Should pass validation"); + } + } + + #[test] + fn test_valid_campaign() { + let config = config::GANACHE_CONFIG.clone(); + // 1000000000000 + // 10000000000000 + { + let campaign = DUMMY_CAMPAIGN.clone(); + let _campaign_context = campaign + .validate(&config, &IDS["leader"]) + .expect("Should pass validation"); } } } diff --git a/primitives/src/chain.rs b/primitives/src/chain.rs index 0eb986e07..b949b3aab 100644 --- a/primitives/src/chain.rs +++ b/primitives/src/chain.rs @@ -1,64 +1,32 @@ -use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; -use std::{collections::HashMap, fmt}; +use std::fmt; -pub static CHAINS: Lazy> = Lazy::new(|| { - let mut map = HashMap::new(); - map.insert( - ChainId(1), - Chain { - chain_id: ChainId(1), - name: "Ethereum Mainnet", - short: "eth", - network: "mainnet", - }, - ); - - map.insert( - ChainId(5), - Chain { - chain_id: ChainId(5), - name: "Ethereum Testnet Görli", - short: "gor", - network: "goerli", - }, - ); - - map.insert( - ChainId(100), - Chain { - chain_id: ChainId(100), - name: "xDAI Chain", - short: "xdai", - network: "mainnet", - }, - ); - - map -}); - -/// Ethereum Virtual Machine Chain -/// see https://chainid.network -pub struct Chain { - pub chain_id: ChainId, - pub name: &'static str, - pub short: &'static str, - pub network: &'static str, -} +use crate::{config::TokenInfo, util::ApiUrl, Address, Campaign, Channel}; +/// The Id of the chain +/// +/// # Ethereum Virtual Machine +/// +/// For all the EVM-compatible Chain IDs visit https://chainid.network #[derive(Serialize, Deserialize, Hash, Clone, Copy, Eq, PartialEq)] #[serde(transparent)] pub struct ChainId(u32); impl ChainId { + /// # Panics: + /// + /// If `id` is `0`. 
pub fn new(id: u32) -> Self { + assert!(id != 0); + Self(id) } } -/// Default ChainId: 1 - Ethereum Mainnet -pub fn eth_mainnet() -> ChainId { - ChainId(1) +impl From for ChainId { + fn from(id: u32) -> Self { + Self::new(id) + } } impl fmt::Debug for ChainId { @@ -66,3 +34,77 @@ impl fmt::Debug for ChainId { write!(f, "ChainId({})", self.0) } } + +/// Ethereum Virtual Machine Chain +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] +pub struct Chain { + pub chain_id: ChainId, + /// RPC url of the chain which will be used for Blockchain interactions. + /// + /// # Ethereum Virtual Machine + /// Check out the metadata for all EVM-chains: https://github.com/ethereum-lists/chains + /// Or in json: https://chainid.network/chains.json + pub rpc: ApiUrl, + /// The OUTPACE contract address on this Chain + pub outpace: Address, + /// The Sweeper contract address on this Chain + pub sweeper: Address, +} + +/// Context of [`TokenInfo`] & [`Chain`] information for given [`Channel`] or [`Campaign`]. +/// The additional context of [`Channel`] is known after checking if the Channel token's +/// Chain & Address are whitelisted in the configuration. +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Hash, Clone)] +pub struct ChainOf { + pub context: T, + pub chain: Chain, + pub token: TokenInfo, +} + +impl ChainOf { + pub fn with(self, context: C) -> ChainOf { + ChainOf { + context, + chain: self.chain, + token: self.token, + } + } +} + +impl ChainOf<()> { + pub fn new(chain: Chain, token: TokenInfo) -> ChainOf<()> { + ChainOf { + context: (), + chain, + token, + } + } + + pub fn with_channel(self, channel: Channel) -> ChainOf { + ChainOf { + context: channel, + chain: self.chain, + token: self.token, + } + } + + pub fn with_campaign(self, campaign: Campaign) -> ChainOf { + ChainOf { + context: campaign, + chain: self.chain, + token: self.token, + } + } +} + +impl ChainOf { + /// Get a [`Channel`] with [`Chain`] & [`TokenInfo`] context from + /// the given [`Campaign`]. 
+ pub fn of_channel(&self) -> ChainOf { + ChainOf { + context: self.context.channel, + token: self.token.clone(), + chain: self.chain.clone(), + } + } +} diff --git a/primitives/src/channel.rs b/primitives/src/channel.rs index e10b3e814..9c45e65a5 100644 --- a/primitives/src/channel.rs +++ b/primitives/src/channel.rs @@ -7,7 +7,7 @@ use serde_hex::{SerHex, StrictPfx}; use hex::{FromHex, FromHexError}; -use crate::{Address, Validator, ValidatorId}; +use crate::{chain::Chain, config::TokenInfo, Address, Validator, ValidatorId}; #[derive(Serialize, Deserialize, PartialEq, Eq, Copy, Clone, Hash)] #[serde(transparent)] @@ -90,7 +90,13 @@ impl FromStr for ChannelId { } } -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ChainContext { + pub token: TokenInfo, + pub chain: Chain, +} + +#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)] #[serde(rename_all = "camelCase")] pub struct Channel { pub leader: ValidatorId, @@ -150,7 +156,7 @@ impl fmt::Display for Nonce { impl fmt::Debug for Nonce { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Nonce({})", self.0.to_string()) + write!(f, "Nonce({})", self.0) } } diff --git a/primitives/src/config.rs b/primitives/src/config.rs index 5bff3a21e..89976f0f7 100644 --- a/primitives/src/config.rs +++ b/primitives/src/config.rs @@ -1,17 +1,15 @@ -use crate::{event_submission::RateLimit, Address, BigNum, ValidatorId}; +use crate::{ + chain::{Chain, ChainId}, + event_submission::RateLimit, + Address, BigNum, ChainOf, ValidatorId, +}; use once_cell::sync::Lazy; -use serde::{Deserialize, Deserializer, Serialize}; -use serde_hex::{SerHex, StrictPfx}; +use serde::{Deserialize, Serialize}; use std::{collections::HashMap, num::NonZeroU8}; use thiserror::Error; pub use toml::de::Error as TomlError; -pub static DEVELOPMENT_CONFIG: Lazy = Lazy::new(|| { - toml::from_str(include_str!("../../docs/config/dev.toml")) - .expect("Failed to parse dev.toml config file") -}); - pub static PRODUCTION_CONFIG: Lazy = Lazy::new(|| { toml::from_str(include_str!("../../docs/config/prod.toml")) .expect("Failed to parse prod.toml config file") @@ -27,6 +25,7 @@ pub static GANACHE_CONFIG: Lazy = Lazy::new(|| { /// The environment in which the application is running /// Defaults to [`Environment::Development`] pub enum Environment { + /// The default development setup is running `ganache-cli` locally. Development, Production, } @@ -37,25 +36,15 @@ impl Default for Environment { } } -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct TokenInfo { - pub min_token_units_for_deposit: BigNum, - pub min_validator_fee: BigNum, - pub precision: NonZeroU8, -} - #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all(serialize = "SCREAMING_SNAKE_CASE"))] pub struct Config { + /// Maximum number of channels to return per request pub max_channels: u32, pub channels_find_limit: u32, pub campaigns_find_limit: u32, pub spendable_find_limit: u32, pub wait_time: u32, - #[deprecated = "redundant V4 value. No aggregates are needed for V5"] - pub aggr_throttle: u32, - #[deprecated = "For V5 this should probably be part of the Analytics"] - pub events_find_limit: u32, pub msgs_find_limit: u32, pub analytics_find_limit_v5: u32, /// In milliseconds @@ -76,60 +65,78 @@ pub struct Config { pub all_campaigns_timeout: u32, /// In Milliseconds pub channel_tick_timeout: u32, - pub ip_rate_limit: RateLimit, // HashMap?? - pub sid_rate_limit: RateLimit, // HashMap ?? 
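A hedged sketch of how the new `ChainOf` context is meant to be assembled and narrowed, mirroring the usage in the tests further down; it assumes the `test-util` fixtures of `primitives`, and the function name is illustrative only:

use primitives::{config::GANACHE_CONFIG, util::tests::prep_db::DUMMY_CAMPAIGN, ChainOf, Channel};

fn chain_context_sketch() {
    let config = GANACHE_CONFIG.clone();
    let campaign = DUMMY_CAMPAIGN.clone();

    // Resolve the Chain & TokenInfo pair for the Campaign's Channel token
    // and attach the Campaign as the context.
    let campaign_context = config
        .find_chain_token(campaign.channel.token)
        .expect("The Campaign's Channel token should be whitelisted in the config")
        .with_campaign(campaign);

    // Narrow the context to the Channel, e.g. before calling `get_deposit`.
    let channel_context: ChainOf<Channel> = campaign_context.of_channel();

    assert_eq!(campaign_context.context.channel, channel_context.context);
}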
- #[serde(with = "SerHex::")] - pub outpace_address: [u8; 20], - #[serde(with = "SerHex::")] - pub sweeper_address: [u8; 20], - pub ethereum_network: String, + pub ip_rate_limit: RateLimit, + pub sid_rate_limit: RateLimit, pub creators_whitelist: Vec
, pub validators_whitelist: Vec, pub admins: Vec, - #[serde(deserialize_with = "deserialize_token_whitelist")] - pub token_address_whitelist: HashMap, + /// The key of this map is a human-readable text of the Chain name + /// for readability in the configuration file. + /// + /// - To get the chain of a token address use [`Config::find_token_chain`]. + /// + /// - To get a chain RPC use [`Config::find_chain_rpc`]. + /// + /// **NOTE:** Make sure that a Token [`Address`] is unique across all Chains, + /// otherwise `Config::find_chain_token` will fetch only one of them and cause unexpected problems. + #[serde(rename = "chain")] + pub chains: HashMap, } impl Config { - /// Utility method that will deserialize a Toml file content into a `Config`. + /// Utility method that will deserialize a Toml file content into a [`Config`]. /// /// Instead of relying on the `toml` crate directly, use this method instead. pub fn try_toml(toml: &str) -> Result { toml::from_str(toml) } -} -#[derive(Serialize, Deserialize, Debug, Clone)] -struct ConfigWhitelist { - address: Address, - min_token_units_for_deposit: BigNum, - min_validator_fee: BigNum, - precision: NonZeroU8, + /// Finds a [`Chain`] based on the [`ChainId`]. + pub fn find_chain(&self, chain_id: ChainId) -> Option<&ChainInfo> { + self.chains + .values() + .find(|chain_info| chain_info.chain.chain_id == chain_id) + } + + /// Finds the pair of Chain & Token, given only a token [`Address`]. + pub fn find_chain_token(&self, token: Address) -> Option> { + self.chains.values().find_map(|chain_info| { + chain_info + .tokens + .values() + .find(|token_info| token_info.address == token) + .map(|token_info| ChainOf::new(chain_info.chain.clone(), token_info.clone())) + }) + } } -fn deserialize_token_whitelist<'de, D>( - deserializer: D, -) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - let array: Vec = Deserialize::deserialize(deserializer)?; +/// Configured chain with tokens. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ChainInfo { + #[serde(flatten)] + pub chain: Chain, + /// A Chain should always have whitelisted tokens configured, + /// otherwise there is no reason to have the chain set up. + #[serde(rename = "token")] + pub tokens: HashMap, +} - let tokens_whitelist: HashMap = array - .into_iter() - .map(|config_whitelist| { - ( - config_whitelist.address, - TokenInfo { - min_token_units_for_deposit: config_whitelist.min_token_units_for_deposit, - min_validator_fee: config_whitelist.min_validator_fee, - precision: config_whitelist.precision, - }, - ) - }) - .collect(); +impl ChainInfo { + pub fn find_token(&self, token: Address) -> Option<&TokenInfo> { + self.tokens + .values() + .find(|token_info| token_info.address == token) + } +} - Ok(tokens_whitelist) +/// Configured Token in a specific [`Chain`]. +/// Precision can differ for the same token from one [`Chain`] to another. +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq, Hash)] +pub struct TokenInfo { + pub min_token_units_for_deposit: BigNum, + pub min_validator_fee: BigNum, + pub precision: NonZeroU8, + pub address: Address, } #[derive(Debug, Error)] @@ -140,6 +147,8 @@ pub enum ConfigError { InvalidFile(#[from] std::io::Error), } +/// If no `config_file` path is provided it will load the [`Environment`] configuration. +/// If `config_file` path is provided it will try to read and parse the file in Toml format. 
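A small lookup sketch against the bundled `GANACHE_CONFIG`; the `"Ganache #1337"` and `"Mocked TOKEN"` keys are the ones used by the test fixtures below, and the function is illustrative only:

use primitives::config::GANACHE_CONFIG;

fn config_lookup_sketch() {
    let config = GANACHE_CONFIG.clone();

    // The keys of `config.chains` are the human-readable names from the TOML file.
    let chain_info = config
        .chains
        .get("Ganache #1337")
        .expect("Ganache config should contain Chain #1337");

    let token_info = chain_info
        .tokens
        .get("Mocked TOKEN")
        .expect("Chain #1337 should have the Mocked TOKEN configured");

    // The same pair can be resolved starting from the token Address alone,
    // which is why token addresses must stay unique across configured chains.
    let chain_of_token = config
        .find_chain_token(token_info.address)
        .expect("The Mocked TOKEN address should be whitelisted");

    assert_eq!(chain_info.chain, chain_of_token.chain);
    assert_eq!(token_info.address, chain_of_token.token.address);
}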
pub fn configuration( environment: Environment, config_file: Option<&str>, @@ -152,7 +161,7 @@ pub fn configuration( } None => match environment { Environment::Production => Ok(PRODUCTION_CONFIG.clone()), - Environment::Development => Ok(DEVELOPMENT_CONFIG.clone()), + Environment::Development => Ok(GANACHE_CONFIG.clone()), }, } } diff --git a/primitives/src/ipfs.rs b/primitives/src/ipfs.rs index 15564a1b8..b50bd852c 100644 --- a/primitives/src/ipfs.rs +++ b/primitives/src/ipfs.rs @@ -25,7 +25,7 @@ impl slog::Value for IPFS { impl fmt::Debug for IPFS { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "IPFS({})", self.0.to_string()) + write!(f, "IPFS({})", self.0) } } @@ -209,7 +209,7 @@ impl fmt::Display for Url { impl fmt::Debug for Url { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Url({})", self.to_string()) + write!(f, "Url({})", self) } } diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 94522344a..886d0056f 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -11,6 +11,7 @@ pub use self::{ balances_map::{BalancesMap, UnifiedMap}, big_num::BigNum, campaign::{Campaign, CampaignId}, + chain::{Chain, ChainId, ChainOf}, channel::{Channel, ChannelId}, config::Config, deposit::Deposit, @@ -29,6 +30,7 @@ pub mod balances_map; pub mod big_num; pub mod campaign; pub mod campaign_validator; +mod chain; pub mod channel; pub mod config; mod eth_checksum; diff --git a/primitives/src/util/api.rs b/primitives/src/util/api.rs index 5670e4330..ee7f25c08 100644 --- a/primitives/src/util/api.rs +++ b/primitives/src/util/api.rs @@ -64,6 +64,10 @@ impl ApiUrl { pub fn to_url(&self) -> Url { self.0.clone() } + + pub fn as_str(&self) -> &str { + self.0.as_str() + } } impl fmt::Debug for ApiUrl { diff --git a/primitives/src/util/tests/prep_db.rs b/primitives/src/util/tests/prep_db.rs index 6dcd2e013..a281dc5ec 100644 --- a/primitives/src/util/tests/prep_db.rs +++ b/primitives/src/util/tests/prep_db.rs @@ -1,12 +1,12 @@ use crate::{ campaign::{Active, Pricing, PricingBounds, Validators}, channel::Nonce, + config::GANACHE_CONFIG, targeting::Rules, - AdUnit, Address, Campaign, Channel, ChannelId, EventSubmission, UnifiedNum, ValidatorDesc, - ValidatorId, IPFS, + AdUnit, Address, Campaign, Channel, EventSubmission, UnifiedNum, ValidatorDesc, ValidatorId, + IPFS, }; use chrono::{TimeZone, Utc}; -use hex::FromHex; use once_cell::sync::Lazy; use std::collections::HashMap; @@ -96,31 +96,6 @@ pub static ADDRESSES: Lazy> = Lazy::new(|| { addresses }); -// These are the Goerli testnet [`Addresses`] of the following stablecoins: -pub static TOKENS: Lazy> = Lazy::new(|| { - let mut tokens = HashMap::new(); - - tokens.insert( - "DAI".into(), - "0x73967c6a0904aa032c103b4104747e88c566b1a2" - .parse::
()
-            .expect("Should parse"),
-    );
-    tokens.insert(
-        "USDT".into(),
-        "0x509ee0d083ddf8ac028f2a56731412edd63223b9"
-            .parse::<Address>()
-            .expect("failed to parse id"),
-    );
-    tokens.insert(
-        "USDC".into(),
-        "0x44dcfcead37be45206af6079648988b29284b2c6"
-            .parse::
() - .expect("failed to parse id"), - ); - tokens -}); - // Dummy adapter auth tokens // authorization tokens pub static AUTH: Lazy> = Lazy::new(|| { @@ -156,7 +131,16 @@ pub static DUMMY_VALIDATOR_FOLLOWER: Lazy = Lazy::new(|| Validato fee_addr: None, }); +/// Dummy Campaign uses Ganache #1337 with the mocked token pub static DUMMY_CAMPAIGN: Lazy = Lazy::new(|| { + let token_info = GANACHE_CONFIG + .chains + .get("Ganache #1337") + .unwrap() + .tokens + .get("Mocked TOKEN") + .unwrap(); + Campaign { id: "0x936da01f9abd4d9d80c702af85c822a8" .parse() @@ -165,7 +149,7 @@ pub static DUMMY_CAMPAIGN: Lazy = Lazy::new(|| { leader: IDS["leader"], follower: IDS["follower"], guardian: IDS["tester"].to_address(), - token: TOKENS["DAI"], + token: token_info.address, nonce: Nonce::from(987_654_321_u32), }, creator: IDS["creator"].to_address(), @@ -197,11 +181,6 @@ pub static DUMMY_CAMPAIGN: Lazy = Lazy::new(|| { } }); -pub static DUMMY_CHANNEL_ID: Lazy = Lazy::new(|| { - ChannelId::from_hex("061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088") - .expect("prep_db: failed to deserialize channel id") -}); - pub static DUMMY_AD_UNITS: Lazy<[AdUnit; 4]> = Lazy::new(|| { [ AdUnit { diff --git a/scripts/ethereum/ganache-cli.sh b/scripts/ethereum/ganache-cli.sh index 2544b7a2d..01fb5815f 100755 --- a/scripts/ethereum/ganache-cli.sh +++ b/scripts/ethereum/ganache-cli.sh @@ -37,6 +37,7 @@ # gasLimit = 4503599627370495 (0xfffffffffffff) # node /app/ganache-core.docker.cli.js --gasLimit 0xfffffffffffff \ + --chainId=1337 \ --db="./snapshot" \ --deterministic \ --mnemonic="diary west sketch curious expose decade symptom height minor layer carry man" \ diff --git a/sentry/Cargo.toml b/sentry/Cargo.toml index 3b45ea526..f1f7ba799 100644 --- a/sentry/Cargo.toml +++ b/sentry/Cargo.toml @@ -11,7 +11,7 @@ edition = "2021" [features] -test-util = ["primitives/test-util"] +test-util = ["primitives/test-util", "adapter/test-util"] [dependencies] # Futures @@ -20,7 +20,7 @@ async-std = "^1" async-trait = "^0.1" # Primitives primitives = { path = "../primitives", features = ["postgres", "test-util"] } -adapter = { version = "0.2", path = "../adapter" } +adapter = { version = "0.2", path = "../adapter", features = ["test-util"] } chrono = { version = "0.4", features = ["serde"] } dashmap = "4" hex = "0.4" diff --git a/sentry/src/access.rs b/sentry/src/access.rs index 477e72c2d..09be44810 100644 --- a/sentry/src/access.rs +++ b/sentry/src/access.rs @@ -169,7 +169,7 @@ mod test { use chrono::TimeZone; use primitives::{ - config::DEVELOPMENT_CONFIG, + config::GANACHE_CONFIG, event_submission::{RateLimit, Rule}, sentry::Event, util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, IDS}, @@ -187,7 +187,7 @@ mod test { async fn setup() -> (Config, Object) { let connection = TESTS_POOL.get().await.expect("Should return Object"); - let config = DEVELOPMENT_CONFIG.clone(); + let config = GANACHE_CONFIG.clone(); (config, connection) } @@ -217,9 +217,23 @@ mod test { async fn session_uid_rate_limit() { let (config, database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: Some(RateLimit { + limit_type: "sid".to_string(), + time_frame: Duration::from_millis(20_000), + }), + }; + let campaign = get_campaign(rule); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -229,15 +243,7 @@ mod test { os: None, }; - let 
rule = Rule { - uids: None, - rate_limit: Some(RateLimit { - limit_type: "sid".to_string(), - time_frame: Duration::from_millis(20_000), - }), - }; let events = get_impression_events(2); - let campaign = get_campaign(rule); let response = check_access( &database, @@ -271,9 +277,24 @@ mod test { async fn ip_rate_limit() { let (config, database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: Some(RateLimit { + limit_type: "ip".to_string(), + time_frame: Duration::from_millis(1), + }), + }; + + let campaign = get_campaign(rule); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -283,15 +304,6 @@ mod test { os: None, }; - let rule = Rule { - uids: None, - rate_limit: Some(RateLimit { - limit_type: "ip".to_string(), - time_frame: Duration::from_millis(1), - }), - }; - let campaign = get_campaign(rule); - let err_response = check_access( &database, &session, @@ -325,9 +337,24 @@ mod test { async fn check_access_past_channel_valid_until() { let (config, database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: Some(RateLimit { + limit_type: "ip".to_string(), + time_frame: Duration::from_millis(1), + }), + }; + let mut campaign = get_campaign(rule); + campaign.active.to = Utc.ymd(1970, 1, 1).and_hms(12, 00, 9); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -337,16 +364,6 @@ mod test { os: None, }; - let rule = Rule { - uids: None, - rate_limit: Some(RateLimit { - limit_type: "ip".to_string(), - time_frame: Duration::from_millis(1), - }), - }; - let mut campaign = get_campaign(rule); - campaign.active.to = Utc.ymd(1970, 1, 1).and_hms(12, 00, 9); - let err_response = check_access( &database, &session, @@ -364,9 +381,23 @@ mod test { async fn with_forbidden_country() { let (config, database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: Some(RateLimit { + limit_type: "ip".to_string(), + time_frame: Duration::from_millis(1), + }), + }; + let campaign = get_campaign(rule); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -376,15 +407,6 @@ mod test { os: None, }; - let rule = Rule { - uids: None, - rate_limit: Some(RateLimit { - limit_type: "ip".to_string(), - time_frame: Duration::from_millis(1), - }), - }; - let campaign = get_campaign(rule); - let err_response = check_access( &database, &session, @@ -402,9 +424,23 @@ mod test { async fn with_forbidden_referrer() { let (config, database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: Some(RateLimit { + limit_type: "ip".to_string(), + time_frame: Duration::from_millis(1), + }), + }; + let campaign = get_campaign(rule); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -414,15 +450,6 @@ mod test { os: None, }; - let rule = Rule { - uids: None, - rate_limit: 
Some(RateLimit { - limit_type: "ip".to_string(), - time_frame: Duration::from_millis(1), - }), - }; - let campaign = get_campaign(rule); - let err_response = check_access( &database, &session, @@ -440,9 +467,20 @@ mod test { async fn no_rate_limit() { let (config, database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: None, + }; + let campaign = get_campaign(rule); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -452,12 +490,6 @@ mod test { os: None, }; - let rule = Rule { - uids: None, - rate_limit: None, - }; - let campaign = get_campaign(rule); - let ok_response = check_access( &database, &session, @@ -475,9 +507,23 @@ mod test { async fn applied_rules() { let (config, mut database) = setup().await; + let rule = Rule { + uids: None, + rate_limit: Some(RateLimit { + limit_type: "ip".to_string(), + time_frame: Duration::from_millis(60_000), + }), + }; + let campaign = get_campaign(rule); + + let chain_context = config + .find_chain_token(campaign.channel.token) + .expect("Campaign's Channel.token should be set in config"); + let auth = Auth { era: 0, uid: IDS["follower"], + chain: chain_context.chain.clone(), }; let session = Session { @@ -487,15 +533,6 @@ mod test { os: None, }; - let rule = Rule { - uids: None, - rate_limit: Some(RateLimit { - limit_type: "ip".to_string(), - time_frame: Duration::from_millis(60_000), - }), - }; - let campaign = get_campaign(rule); - let ok_response = check_access( &database, &session, diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index ff245a7ca..239854eb9 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -27,8 +27,6 @@ use { routes::{ campaign, campaign::{campaign_list, create_campaign, update_campaign}, - get_cfg, - get_analytics, channel::{ add_spender_leaf, channel_list, get_accounting_for_channel, get_all_spender_limits, get_spender_limits, last_approved, @@ -36,16 +34,17 @@ use { create_validator_messages, extract_params, list_validator_messages, }, }, + get_analytics, get_cfg, }, }; -pub mod analytics; -pub mod middleware; -pub mod routes; pub mod access; +pub mod analytics; pub mod application; pub mod db; +pub mod middleware; pub mod payout; +pub mod routes; pub mod spender; static LAST_APPROVED_BY_CHANNEL_ID: Lazy = Lazy::new(|| { @@ -217,7 +216,6 @@ async fn analytics_router( mut req: Request, app: &Application, ) -> Result, ResponseError> { - let (route, method) = (req.uri().path(), req.method()); match (route, method) { @@ -497,11 +495,13 @@ pub struct Session { pub os: Option, } -/// Sentry [`Application`] Auth (Authentication) +/// Validated Authentication for the Sentry [`Application`]. #[derive(Debug, Clone)] pub struct Auth { pub era: i64, pub uid: ValidatorId, + /// The Chain for which this authentication was validated + pub chain: primitives::Chain, } #[cfg(test)] @@ -511,7 +511,7 @@ pub mod test_util { Adapter, }; use primitives::{ - config::DEVELOPMENT_CONFIG, + config::GANACHE_CONFIG, util::tests::{discard_logger, prep_db::IDS}, }; @@ -527,10 +527,16 @@ pub mod test_util { /// Uses development and therefore the goerli testnet addresses of the tokens /// It still uses DummyAdapter. 
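Since `Auth` now records the `Chain` the token was validated for, tests derive it from the Campaign's Channel token; a sketch assuming the `test-util` helpers, with the helper function name being mine:

use primitives::{
    config::GANACHE_CONFIG,
    util::tests::prep_db::{DUMMY_CAMPAIGN, IDS},
};
use sentry::Auth;

// Illustrative helper: build a test Auth whose chain matches the Dummy Campaign's Channel.
fn test_auth() -> Auth {
    let chain_context = GANACHE_CONFIG
        .find_chain_token(DUMMY_CAMPAIGN.channel.token)
        .expect("The Campaign's Channel token should be whitelisted in the config");

    Auth {
        era: 0,
        uid: IDS["follower"],
        chain: chain_context.chain,
    }
}

A mismatch between this `chain` and the Chain resolved for a requested Channel or Campaign makes the loading middlewares below return a `Forbidden` response.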
pub async fn setup_dummy_app() -> Application { - let config = DEVELOPMENT_CONFIG.clone(); + let config = GANACHE_CONFIG.clone(); let adapter = Adapter::new(Dummy::init(Options { dummy_identity: IDS["leader"], - dummy_auth_tokens: Default::default(), + dummy_auth_tokens: vec![ + (IDS["creator"].to_address(), "AUTH_Creator".into()), + (IDS["leader"].to_address(), "AUTH_Leader".into()), + (IDS["follower"].to_address(), "AUTH_Follower".into()), + ] + .into_iter() + .collect(), })); let redis = TESTS_POOL.get().await.expect("Should return Object"); diff --git a/sentry/src/middleware/auth.rs b/sentry/src/middleware/auth.rs index cc41bfbb6..2051cdbe6 100644 --- a/sentry/src/middleware/auth.rs +++ b/sentry/src/middleware/auth.rs @@ -133,6 +133,7 @@ async fn for_request( let auth = Auth { era: adapter_session.era, uid: ValidatorId::from(adapter_session.uid), + chain: adapter_session.chain, }; req.extensions_mut().insert(auth); diff --git a/sentry/src/middleware/campaign.rs b/sentry/src/middleware/campaign.rs index b836efd7b..aacdfee6a 100644 --- a/sentry/src/middleware/campaign.rs +++ b/sentry/src/middleware/campaign.rs @@ -31,7 +31,26 @@ impl Middleware for CampaignLoad { .await? .ok_or(ResponseError::NotFound)?; - request.extensions_mut().insert(campaign); + let campaign_context = application + .config + .find_chain_token(campaign.channel.token) + .ok_or(ResponseError::BadRequest( + "Channel token not whitelisted".to_string(), + ))? + .with_campaign(campaign); + + // If this is an authenticated call + // Check if the Campaign's Channel context (Chain Id) aligns with the Authentication token Chain id + match request.extensions().get::() { + // If Chain Ids differ, the requester hasn't generated Auth token + // to access the Channel in it's Chain Id. + Some(auth) if auth.chain.chain_id != campaign_context.chain.chain_id => { + return Err(ResponseError::Forbidden("Authentication token is generated for different Chain and differs from the Campaign's Channel Chain".into())) + } + _ => {}, + } + + request.extensions_mut().insert(campaign_context); Ok(request) } @@ -68,7 +87,7 @@ impl Middleware for CalledByCreator { #[cfg(test)] mod test { - use primitives::{util::tests::prep_db::DUMMY_CAMPAIGN, Campaign}; + use primitives::{util::tests::prep_db::DUMMY_CAMPAIGN, Campaign, ChainOf}; use crate::{ db::{insert_campaign, insert_channel}, @@ -134,7 +153,14 @@ mod test { .await .expect("Should load campaign"); - assert_eq!(Some(&campaign), request.extensions().get::()); + assert_eq!( + campaign, + request + .extensions() + .get::>() + .expect("Should get Campaign with Chain context") + .context + ); } } } diff --git a/sentry/src/middleware/channel.rs b/sentry/src/middleware/channel.rs index 8fd059db3..f60f7aab6 100644 --- a/sentry/src/middleware/channel.rs +++ b/sentry/src/middleware/channel.rs @@ -1,7 +1,7 @@ use crate::{ db::{get_channel_by_id, get_channel_by_id_and_validator}, middleware::Middleware, - Application, ResponseError, RouteParams, + Application, Auth, ResponseError, RouteParams, }; use adapter::client::Locked; use futures::future::{BoxFuture, FutureExt}; @@ -45,7 +45,20 @@ fn channel_load( .await? 
.ok_or(ResponseError::NotFound)?; - req.extensions_mut().insert(channel); + let channel_context = app.config.find_chain_token(channel.token).ok_or(ResponseError::FailedValidation("Channel token is not whitelisted in this validator".into()))?.with_channel(channel); + + // If this is an authenticated call + // Check if the Channel context (Chain Id) aligns with the Authentication token Chain id + match req.extensions().get::() { + // If Chain Ids differ, the requester hasn't generated Auth token + // to access the Channel in it's Chain Id. + Some(auth) if auth.chain.chain_id != channel_context.chain.chain_id => { + return Err(ResponseError::Forbidden("Authentication token is generated for different Chain and differs from the Channel's Chain".into())) + } + _ => {}, + } + + req.extensions_mut().insert(channel_context); Ok(req) } diff --git a/sentry/src/routes.rs b/sentry/src/routes.rs index 086f7d2be..58feed512 100644 --- a/sentry/src/routes.rs +++ b/sentry/src/routes.rs @@ -1,124 +1,124 @@ //! Sentry REST API documentation -//! +//! //! ## Channel -//! +//! //! All routes are implemented under module [channel]. -//! +//! //! - [`GET /v5/channel/list`](crate::routes::channel::channel_list) -//! +//! //! todo -//! +//! //! - [`GET /v5/channel/:id/accounting`](channel::get_accounting_for_channel) -//! +//! //! todo -//! +//! //! - [`GET /v5/channel/:id/spender/:addr`](channel::get_spender_limits) (auth required) -//! +//! //! todo -//! +//! //! - [`POST /v5/channel/:id/spender/:addr`](channel::add_spender_leaf) (auth required) -//! +//! //! todo -//! +//! //! - [`GET /v5/channel/:id/spender/all`](channel::get_all_spender_limits) (auth required) -//! +//! //! todo -//! +//! //! - [`GET /v5/channel/:id/validator-messages`](channel::validator_message::list_validator_messages) -//! +//! //! - `GET /v5/channel/:id/validator-messages/:ValidatorId` - filter by ValidatorId //! - `GET /v5/channel/:id/validator-messages/:ValidatorId/NewState+ApproveState` - filters by a given [`primitives::ValidatorId`] and a //! [`Validator message types`](primitives::validator::MessageTypes). -//! +//! //! Request query parameters: [channel::validator_message::ValidatorMessagesListQuery] //! Response: [primitives::sentry::ValidatorMessageResponse] -//! +//! //! - [`POST /v5/channel/:id/validator-messages`](channel::validator_message::create_validator_messages) (auth required) -//! +//! //! todo -//! +//! //! - [`POST /v5/channel/:id/last-approved`](channel::last_approved) -//! +//! //! todo -//! +//! //! - `POST /v5/channel/:id/pay` (auth required) -//! +//! //! TODO: implement and document as part of issue #382 -//! +//! //! Channel Payout with authentication of the spender -//! +//! //! Withdrawals of advertiser funds - re-introduces the PAY event with a separate route. -//! +//! //! - `GET /v5/channel/:id/get-leaf` -//! +//! //! TODO: implement and document as part of issue #382 -//! +//! //! This route gets the latest approved state (`NewState`/`ApproveState` pair), //! and finds the given `spender`/`earner` in the balances tree, and produce a merkle proof for it. //! This is useful for the Platform to verify if a spender leaf really exists. -//! +//! //! Query parameters: -//! +//! //! - `spender=[0x...]` or `earner=[0x...]` (required) -//! +//! //! Example Spender: -//! +//! //! `/get-leaf?spender=0x...` -//! +//! //! Example Earner: -//! +//! //! `/get-leaf?earner=0x....` //! This module includes all routes for `Sentry` and the documentation of each Request/Response. -//! +//! //! ## Campaign -//! +//! //! 
All routes are implemented under module [campaign]. -//! +//! //! - `GET /v5/campaign/list` -//! +//! //! Lists all campaigns with pagination and orders them in descending order (`DESC`) by `Campaign.created`. This ensures that the order in the pages will not change if a new `Campaign` is created while still retrieving a page. -//! +//! //! Query parameters: //! - `page=[integer]` (optional) default: `0` //! - `creator=[0x....]` (optional) - address of the creator to be filtered by //! - `activeTo=[integer]` (optional) in seconds - filters campaigns by `Campaign.active.to > query.activeTo` -//! - `validator=[0x...]` or `leader=[0x...]` (optional) - address of the validator to be filtered by. You can either +//! - `validator=[0x...]` or `leader=[0x...]` (optional) - address of the validator to be filtered by. You can either //! - `validator=[0x...]` - it will return all `Campaign`s where this address is **either** `Channel.leader` or `Channel.follower` //! - `leader=[0x...]` - it will return all `Campaign`s where this address is `Channel.leader` -//! -//! +//! +//! //! - `POST /v5/campaign` (auth required) -//! +//! //! Create a new Campaign. -//! +//! //! It will make sure the `Channel` is created if new and it will update the spendable amount using the `Adapter::get_deposit()`. -//! +//! //! Authentication: **required** to validate `Campaign.creator == Auth.uid` -//! +//! //! Request Body: [`primitives::sentry::campaign_create::CreateCampaign`] (json) -//! +//! //! `POST /v5/campaign/:id/close` (auth required) -//! +//! //! todo -//! +//! //! ## Analytics -//! +//! //! - `GET /v5/analytics` -//! +//! //! todo -//! +//! //! - `GET /v5/analytics/for-publisher` (auth required) -//! +//! //! todo -//! +//! //! - `GET /v5/analytics/for-advertiser` (auth required) -//! +//! //! todo -//! +//! //! - `GET /v5/analytics/for-admin` (auth required) -//! +//! //! todo -//! +//! 
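Most of the route documentation above is still `todo`; as a hedged client-side sketch of the campaign listing route and its documented query parameters, assuming a running sentry instance and `reqwest` with its `json` feature (already used by the test harness):

use reqwest::Client;
use serde_json::Value;

/// Fetch one page of `GET /v5/campaign/list`, optionally filtered by a leader address.
async fn list_campaigns(
    client: &Client,
    sentry_base_url: &str,
    page: u64,
    leader: Option<&str>,
) -> Result<Value, reqwest::Error> {
    let mut url = format!("{}/v5/campaign/list?page={}", sentry_base_url, page);
    if let Some(leader) = leader {
        url.push_str(&format!("&leader={}", leader));
    }

    client.get(url).send().await?.json().await
}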
pub use analytics::analytics as get_analytics; pub use cfg::config as get_cfg; @@ -128,4 +128,4 @@ mod analytics; pub mod campaign; // `cfg` module has single request, so we only export this request mod cfg; -pub mod channel; \ No newline at end of file +pub mod channel; diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 091664050..953362ecd 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -75,6 +75,7 @@ mod test { test_util::setup_dummy_app, Auth, ValidatorId, }; + use adapter::dummy::DUMMY_CHAIN; use chrono::{Timelike, Utc}; use primitives::{ analytics::{query::Time, Metric, OperatingSystem, Timeframe}, @@ -980,15 +981,18 @@ mod test { let publisher_auth = Auth { era: 0, uid: ValidatorId::from(*PUBLISHER), + chain: DUMMY_CHAIN.clone(), }; let advertiser_auth = Auth { era: 0, uid: ValidatorId::from(*CREATOR), + chain: DUMMY_CHAIN.clone(), }; let admin_auth = Auth { era: 0, uid: ValidatorId::try_from("0xce07CbB7e054514D590a0262C93070D838bFBA2e") .expect("should create"), + chain: DUMMY_CHAIN.clone(), }; // test for publisher { diff --git a/sentry/src/routes/campaign.rs b/sentry/src/routes/campaign.rs index 26b3082b9..c2f2bbe11 100644 --- a/sentry/src/routes/campaign.rs +++ b/sentry/src/routes/campaign.rs @@ -18,14 +18,13 @@ use futures::future::try_join_all; use hyper::{Body, Request, Response}; use primitives::{ campaign_validator::Validator, - config::TokenInfo, sentry::{ campaign::CampaignListQuery, campaign_create::{CreateCampaign, ModifyCampaign}, SuccessResponse, }, spender::Spendable, - Address, Campaign, CampaignId, Channel, ChannelId, Deposit, UnifiedNum, + Address, Campaign, CampaignId, ChainOf, Channel, ChannelId, Deposit, UnifiedNum, }; use slog::error; use std::cmp::{max, Ordering}; @@ -70,20 +69,22 @@ pub enum LatestSpendableError { pub async fn update_latest_spendable( adapter: &Adapter, pool: &DbPool, - channel: Channel, - token: &TokenInfo, + channel_context: &ChainOf, address: Address, ) -> Result where C: Locked + 'static, { - let latest_deposit = adapter.get_deposit(&channel, address).await?; + let latest_deposit = adapter.get_deposit(&channel_context, address).await?; let spendable = Spendable { spender: address, - channel, - deposit: Deposit::::from_precision(latest_deposit, token.precision.get()) - .ok_or(LatestSpendableError::Overflow)?, + channel: channel_context.context, + deposit: Deposit::::from_precision( + latest_deposit, + channel_context.token.precision.get(), + ) + .ok_or(LatestSpendableError::Overflow)?, }; Ok(update_spendable(pool.clone(), &spendable).await?) 
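A sketch of a call site for the reworked `update_latest_spendable`, resolving the `ChainOf<Channel>` from the application `Config` first; the wrapper function, its error handling, and the assumption that `Application` and its `config`/`adapter`/`pool` fields are reachable as in the tests are all illustrative:

use adapter::client::Locked;
use primitives::{spender::Spendable, Address, Channel};
use sentry::{routes::campaign::update_latest_spendable, Application};

async fn refresh_spendable<C: Locked + 'static>(
    app: &Application<C>,
    channel: Channel,
    spender: Address,
) -> Option<Spendable> {
    // The Chain/Token context decides which token precision is used when the
    // adapter's deposit (a BigNum amount in token units) is converted to UnifiedNum.
    let channel_context = app.config.find_chain_token(channel.token)?.with_channel(channel);

    update_latest_spendable(&app.adapter, &app.pool, &channel_context, spender)
        .await
        .ok()
}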
@@ -91,14 +92,17 @@ where pub async fn fetch_campaign_ids_for_channel( pool: &DbPool, - channel_id: &ChannelId, + channel_id: ChannelId, limit: u32, ) -> Result, ResponseError> { - let campaign_ids = get_campaign_ids_by_channel(pool, channel_id, limit.into(), 0).await?; + let campaign_ids = get_campaign_ids_by_channel(pool, &channel_id, limit.into(), 0).await?; let total_count = list_campaigns_total_count( pool, - (&["campaigns.channel_id = $1".to_string()], vec![channel_id]), + ( + &["campaigns.channel_id = $1".to_string()], + vec![&channel_id], + ), ) .await?; @@ -115,7 +119,7 @@ pub async fn fetch_campaign_ids_for_channel( let other_pages: Vec> = try_join_all((1..total_pages).map(|i| { get_campaign_ids_by_channel( pool, - channel_id, + &channel_id, limit.into(), i.checked_mul(limit.into()).expect("TODO"), ) @@ -146,15 +150,15 @@ where let body = hyper::body::to_bytes(req.into_body()).await?; - let campaign = serde_json::from_slice::(&body) + let campaign_context = serde_json::from_slice::(&body) .map_err(|e| ResponseError::FailedValidation(e.to_string()))? // create the actual `Campaign` with a randomly generated `CampaignId` or the set `CampaignId` - .into_campaign(); - - // Validate the campaign as soon as a valid JSON was passed. - campaign + .into_campaign() + // Validate the campaign as soon as a valid JSON was passed. + // This will validate the Context - Chain & Token are whitelisted! .validate(&app.config, &app.adapter.whoami()) .map_err(|err| ResponseError::FailedValidation(err.to_string()))?; + let campaign = &campaign_context.context; if auth.uid.to_address() != campaign.creator { return Err(ResponseError::Forbidden( @@ -162,12 +166,6 @@ where )); } - let token = app - .config - .token_address_whitelist - .get(&campaign.channel.token) - .ok_or_else(|| ResponseError::BadRequest("Channel token is not whitelisted".to_string()))?; - // make sure that the Channel is available in the DB // insert Channel insert_channel(&app.pool, campaign.channel) @@ -192,8 +190,7 @@ where let latest_spendable = update_latest_spendable( &app.adapter, &app.pool, - campaign.channel, - token, + &campaign_context.of_channel(), campaign.creator, ) .await @@ -210,7 +207,7 @@ where let channel_campaigns = fetch_campaign_ids_for_channel( &app.pool, - &campaign.channel.id(), + campaign.channel.id(), app.config.campaigns_find_limit, ) .await?; @@ -312,11 +309,12 @@ pub async fn close_campaign( .get::() .expect("Auth should be present"); - let mut campaign = req + let campaign_context = req .extensions() - .get::() + .get::>() .expect("We must have a campaign in extensions") .to_owned(); + let mut campaign = campaign_context.context; if auth.uid.to_address() != campaign.creator { Err(ResponseError::Forbidden( @@ -333,7 +331,7 @@ pub async fn close_campaign( .budget .checked_sub(&UnifiedNum::from(old_remaining)) .ok_or_else(|| { - ResponseError::BadRequest("Campaign budget overflows/underflows".to_string()) + ResponseError::BadRequest("Campaign budget overflow/underflow".to_string()) })?; update_campaign(&app.pool, &campaign).await?; @@ -356,9 +354,9 @@ pub mod update_campaign { ) -> Result, ResponseError> { let campaign_being_mutated = req .extensions() - .get::() + .get::>() .expect("We must have a campaign in extensions") - .to_owned(); + .clone(); let body = hyper::body::to_bytes(req.into_body()).await?; @@ -371,7 +369,7 @@ pub mod update_campaign { &app.pool, &app.config, &app.campaign_remaining, - campaign_being_mutated, + &campaign_being_mutated, modify_campaign_fields, ) .await @@ -385,9 +383,10 @@ pub 
mod update_campaign { pool: &DbPool, config: &Config, campaign_remaining: &CampaignRemaining, - campaign: Campaign, + campaign_context: &ChainOf, modify_campaign: ModifyCampaign, ) -> Result { + let campaign = &campaign_context.context; // *NOTE*: When updating campaigns make sure sum(campaigns.map(getRemaining)) <= totalDeposited - totalSpent // !WARNING!: totalSpent != sum(campaign.map(c => c.spending)) therefore we must always calculate remaining funds based on total_deposit - lastApprovedNewState.spenders[user] // *NOTE*: To close a campaign set campaignBudget to campaignSpent so that spendable == 0 @@ -413,14 +412,13 @@ pub mod update_campaign { .map(|accounting| accounting.amount) .unwrap_or_default(); - let token = config - .token_address_whitelist - .get(&campaign.channel.token) - .ok_or(Error::ChannelTokenNotWhitelisted)?; - - let latest_spendable = - update_latest_spendable(&adapter, pool, campaign.channel, token, campaign.creator) - .await?; + let latest_spendable = update_latest_spendable( + &adapter, + pool, + &campaign_context.of_channel(), + campaign.creator, + ) + .await?; // Gets the latest Spendable for this (spender, channelId) pair let total_deposited = latest_spendable.deposit.total; @@ -431,7 +429,7 @@ pub mod update_campaign { let channel_campaigns = fetch_campaign_ids_for_channel( pool, - &campaign.channel.id(), + campaign.channel.id(), config.campaigns_find_limit, ) .await @@ -479,7 +477,7 @@ pub mod update_campaign { }; } - let modified_campaign = modify_campaign.apply(campaign); + let modified_campaign = modify_campaign.apply(campaign.clone()); update_campaign(pool, &modified_campaign).await?; Ok(modified_campaign) @@ -577,7 +575,7 @@ pub mod insert_events { use primitives::{ balances::{Balances, CheckedState, OverflowError}, sentry::{Event, SuccessResponse}, - Address, Campaign, CampaignId, DomainError, UnifiedNum, ValidatorDesc, + Address, Campaign, CampaignId, ChainOf, DomainError, UnifiedNum, ValidatorDesc, }; use slog::error; use thiserror::Error; @@ -620,9 +618,9 @@ pub mod insert_events { .get::() .expect("request should have session"); - let campaign = req_head + let campaign_context = req_head .extensions - .get::() + .get::>() .expect("request should have a Campaign loaded"); let body_bytes = hyper::body::to_bytes(req_body).await?; @@ -632,7 +630,7 @@ pub mod insert_events { .remove("events") .ok_or_else(|| ResponseError::BadRequest("invalid request".to_string()))?; - let processed = process_events(app, auth, session, campaign, events).await?; + let processed = process_events(app, auth, session, campaign_context, events).await?; Ok(Response::builder() .header("Content-type", "application/json") @@ -644,16 +642,18 @@ pub mod insert_events { app: &Application, auth: Option<&Auth>, session: &Session, - campaign: &Campaign, + campaign_context: &ChainOf, events: Vec, ) -> Result { + let campaign = &campaign_context.context; + // handle events - check access check_access( &app.redis, session, auth, &app.config.ip_rate_limit, - campaign, + &campaign_context.context, &events, ) .await @@ -679,14 +679,14 @@ pub mod insert_events { for event in events.into_iter() { let event_payout = { // calculate earners payouts - let payout = get_payout(&app.logger, campaign, &event, session)?; + let payout = get_payout(&app.logger, &campaign_context.context, &event, session)?; match payout { Some((earner, payout)) => { let spending_result = spend_for_event( &app.pool, &app.campaign_remaining, - campaign, + &campaign_context.context, earner, leader, follower, @@ -698,7 +698,7 
@@ pub mod insert_events { match spending_result { Ok(()) => Some((event, earner, payout)), Err(err) => { - error!(&app.logger, "Payout spending failed: {}", err; "campaign" => ?campaign, "event" => ?event, "earner" => ?earner, "unpaid amount" => %payout, "err" => ?err); + error!(&app.logger, "Payout spending failed: {}", err; "campaign" => ?campaign_context, "event" => ?event, "earner" => ?earner, "unpaid amount" => %payout, "err" => ?err); None } @@ -715,8 +715,15 @@ pub mod insert_events { } // Record successfully paid out events to Analytics - if let Err(err) = analytics::record(&app.pool, campaign, session, events_success).await { - error!(&app.logger, "Analytics recording failed: {}", err; "campaign" => ?campaign, "err" => ?err) + if let Err(err) = analytics::record( + &app.pool, + &campaign_context.context, + session, + events_success, + ) + .await + { + error!(&app.logger, "Analytics recording failed: {}", err; "campaign" => ?campaign_context, "err" => ?err) } Ok(true) @@ -987,7 +994,7 @@ mod test { use hyper::StatusCode; use primitives::{ util::tests::prep_db::{DUMMY_CAMPAIGN, IDS}, - BigNum, ChannelId, ValidatorId, + BigNum, ValidatorId, }; #[tokio::test] @@ -996,24 +1003,27 @@ mod test { /// Test with multiple campaigns (because of Budget) a modification of campaign async fn create_and_modify_with_multiple_campaigns() { let app = setup_dummy_app().await; - let dummy_campaign = DUMMY_CAMPAIGN.clone(); + + // Create a new Campaign with different CampaignId + let dummy_channel = DUMMY_CAMPAIGN.channel; + let channel_chain = app + .config + .find_chain_token(dummy_channel.token) + .expect("Channel token should be whitelisted in config!"); + let channel_context = channel_chain.with_channel(dummy_channel); + let multiplier = 10_u64.pow(UnifiedNum::PRECISION.into()); // this function should be called before each creation/modification of a Campaign! - let add_deposit_call = |channel: ChannelId, creator: Address, token: Address| { + let add_deposit_call = |channel_context: &ChainOf, for_address: Address| { app.adapter.client.add_deposit_call( - channel, - creator, + channel_context.context.id(), + for_address, Deposit { // a deposit 4 times larger than the Campaign Budget - total: UnifiedNum::from(200_000_000_000).to_precision( - app.config - .token_address_whitelist - .get(&token) - .expect("Should get token") - .precision - .get(), - ), + // I.e. 
4 TOKENS + total: UnifiedNum::from(200_000_000_000) + .to_precision(channel_context.token.precision.get()), still_on_create2: BigNum::from(0), }, ) @@ -1023,6 +1033,7 @@ mod test { let auth = Auth { era: 0, uid: ValidatorId::from(create_campaign.creator), + chain: channel_context.chain.clone(), }; let body = @@ -1034,12 +1045,12 @@ mod test { .expect("Should build Request") }; - let campaign: Campaign = { + let campaign_context: ChainOf = { // erases the CampaignId for the CreateCampaign request - let mut create = CreateCampaign::from_campaign_erased(dummy_campaign, None); + let mut create = CreateCampaign::from_campaign_erased(DUMMY_CAMPAIGN.clone(), None); create.budget = UnifiedNum::from(500 * multiplier); // prepare for Campaign creation - add_deposit_call(create.channel.id(), create.creator, create.channel.token); + add_deposit_call(&channel_context, create.creator); let create_response = create_campaign(build_request(create), &app) .await @@ -1067,7 +1078,8 @@ mod test { UnifiedNum::from(50_000_000_000), UnifiedNum::from(remaining.unsigned_abs()) ); - campaign + + channel_context.clone().with(campaign) }; // modify campaign @@ -1083,18 +1095,14 @@ mod test { targeting_rules: None, }; // prepare for Campaign modification - add_deposit_call( - campaign.channel.id(), - campaign.creator, - campaign.channel.token, - ); + add_deposit_call(&channel_context, campaign_context.context.creator); let modified_campaign = modify_campaign( app.adapter.clone(), &app.pool, &app.config, &app.campaign_remaining, - campaign.clone(), + &campaign_context, modify, ) .await @@ -1103,7 +1111,7 @@ mod test { assert_eq!(new_budget, modified_campaign.budget); assert_eq!(Some("Updated title".to_string()), modified_campaign.title); - modified_campaign + channel_context.clone().with(modified_campaign) }; // we have 1000 left from our deposit, so we are using half of it @@ -1114,11 +1122,7 @@ mod test { create_second.budget = UnifiedNum::from(500 * multiplier); // prepare for Campaign creation - add_deposit_call( - create_second.channel.id(), - create_second.creator, - create_second.channel.token, - ); + add_deposit_call(&channel_context, create_second.creator); let create_response = create_campaign(build_request(create_second), &app) .await @@ -1144,7 +1148,7 @@ mod test { create.budget = UnifiedNum::from(600 * multiplier); // prepare for Campaign creation - add_deposit_call(create.channel.id(), create.creator, create.channel.token); + add_deposit_call(&channel_context, create.creator); let create_err = create_campaign(build_request(create), &app) .await @@ -1172,18 +1176,14 @@ mod test { }; // prepare for Campaign modification - add_deposit_call( - modified.channel.id(), - modified.creator, - modified.channel.token, - ); + add_deposit_call(&channel_context, modified.context.creator); let modified_campaign = modify_campaign( app.adapter.clone(), &app.pool, &app.config, &app.campaign_remaining, - modified, + &modified, modify, ) .await @@ -1191,7 +1191,7 @@ mod test { assert_eq!(lower_budget, modified_campaign.budget); - modified_campaign + modified.clone().with(modified_campaign) }; // Just enough budget to create this Campaign @@ -1203,7 +1203,7 @@ mod test { create.budget = UnifiedNum::from(600 * multiplier); // prepare for Campaign creation - add_deposit_call(create.channel.id(), create.creator, create.channel.token); + add_deposit_call(&channel_context, create.creator); let create_response = create_campaign(build_request(create), &app) .await @@ -1234,18 +1234,14 @@ mod test { }; // prepare for Campaign 
modification - add_deposit_call( - modified.channel.id(), - modified.creator, - modified.channel.token, - ); + add_deposit_call(&channel_context, modified.context.creator); let modify_err = modify_campaign( app.adapter.clone(), &app.pool, &app.config, &app.campaign_remaining, - modified, + &modified, modify, ) .await @@ -1325,7 +1321,9 @@ mod test { #[tokio::test] async fn campaign_is_closed_properly() { - let campaign = DUMMY_CAMPAIGN.clone(); + // create a new campaign with a new CampaignId + let campaign = + CreateCampaign::from_campaign_erased(DUMMY_CAMPAIGN.clone(), None).into_campaign(); let app = setup_dummy_app().await; @@ -1336,6 +1334,12 @@ mod test { .await .expect("Should insert dummy campaign"); + let campaign_context = app + .config + .find_chain_token(campaign.channel.token) + .expect("Config should have the Dummy campaign.channel.token") + .with(campaign.clone()); + // Test if remaining is set to 0 { app.campaign_remaining @@ -1346,11 +1350,12 @@ mod test { let auth = Auth { era: 0, uid: ValidatorId::from(campaign.creator), + chain: campaign_context.chain.clone(), }; let req = Request::builder() .extension(auth) - .extension(campaign.clone()) + .extension(campaign_context.clone()) .body(Body::empty()) .expect("Should build Request"); @@ -1381,11 +1386,12 @@ mod test { let auth = Auth { era: 0, uid: IDS["leader"], + chain: campaign_context.chain.clone(), }; let req = Request::builder() .extension(auth) - .extension(campaign.clone()) + .extension(campaign_context.clone()) .body(Body::empty()) .expect("Should build Request"); diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index c97d93143..b90ee1ecf 100644 --- a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -1,5 +1,5 @@ //! Channel - `/v5/channel` routes -//! +//! use crate::db::{ accounting::{get_all_accountings_for_channel, update_accounting, Side}, @@ -14,14 +14,13 @@ use futures::future::try_join_all; use hyper::{Body, Request, Response}; use primitives::{ balances::{Balances, CheckedState, UncheckedState}, - config::TokenInfo, sentry::{ channel_list::ChannelListQuery, AccountingResponse, AllSpendersQuery, AllSpendersResponse, LastApproved, LastApprovedQuery, LastApprovedResponse, SpenderResponse, SuccessResponse, }, spender::{Spendable, Spender}, validator::NewState, - Address, Channel, Deposit, UnifiedNum, + Address, ChainOf, Channel, Deposit, UnifiedNum, }; use slog::{error, Logger}; use std::{collections::HashMap, str::FromStr}; @@ -52,10 +51,11 @@ pub async fn last_approved( app: &Application, ) -> Result, ResponseError> { // get request Channel - let channel = *req + let channel = req .extensions() - .get::() - .ok_or(ResponseError::NotFound)?; + .get::>() + .ok_or(ResponseError::NotFound)? 
+ .context; let default_response = Response::builder() .header("Content-type", "application/json") @@ -113,17 +113,18 @@ pub async fn last_approved( /// This will make sure to insert/get the `Channel` from DB before attempting to create the `Spendable` async fn create_or_update_spendable_document( adapter: &Adapter, - token_info: &TokenInfo, pool: DbPool, - channel: &Channel, + channel_context: &ChainOf, spender: Address, ) -> Result { - insert_channel(&pool, *channel).await?; - - let deposit = adapter.get_deposit(channel, spender).await?; - let total = UnifiedNum::from_precision(deposit.total, token_info.precision.get()); - let still_on_create2 = - UnifiedNum::from_precision(deposit.still_on_create2, token_info.precision.get()); + insert_channel(&pool, channel_context.context).await?; + + let deposit = adapter.get_deposit(channel_context, spender).await?; + let total = UnifiedNum::from_precision(deposit.total, channel_context.token.precision.get()); + let still_on_create2 = UnifiedNum::from_precision( + deposit.still_on_create2, + channel_context.token.precision.get(), + ); let (total, still_on_create2) = match (total, still_on_create2) { (Some(total), Some(still_on_create2)) => (total, still_on_create2), _ => { @@ -134,7 +135,7 @@ async fn create_or_update_spendable_document( }; let spendable = Spendable { - channel: *channel, + channel: channel_context.context, deposit: Deposit { total, still_on_create2, @@ -169,30 +170,24 @@ pub async fn get_spender_limits( .get::() .expect("request should have route params"); - let channel = req + let channel_context = req .extensions() - .get::() - .expect("Request should have Channel") + .get::>() + .expect("Request should have Channel & Chain/TokenInfo") .to_owned(); + let channel = &channel_context.context; let spender = Address::from_str(&route_params.index(1))?; let latest_spendable = fetch_spendable(app.pool.clone(), &spender, &channel.id()).await?; - let token_info = app - .config - .token_address_whitelist - .get(&channel.token) - .ok_or_else(|| ResponseError::FailedValidation("Unsupported Channel Token".to_string()))?; - let latest_spendable = match latest_spendable { Some(spendable) => spendable, None => { create_or_update_spendable_document( &app.adapter, - token_info, app.pool.clone(), - &channel, + &channel_context, spender, ) .await? @@ -226,9 +221,9 @@ pub async fn get_all_spender_limits( ) -> Result, ResponseError> { let channel = req .extensions() - .get::() + .get::>() .expect("Request should have Channel") - .to_owned(); + .context; let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; let limit = app.config.spendable_find_limit; @@ -287,9 +282,9 @@ pub async fn add_spender_leaf( let channel = req .extensions() - .get::() - .expect("Request should have Channel") - .to_owned(); + .get::>() + .ok_or(ResponseError::NotFound)? + .context; update_accounting( app.pool.clone(), @@ -346,9 +341,9 @@ pub async fn get_accounting_for_channel( ) -> Result, ResponseError> { let channel = req .extensions() - .get::() - .expect("Request should have Channel") - .to_owned(); + .get::>() + .ok_or(ResponseError::NotFound)? 
+ .context; let accountings = get_all_accountings_for_channel(app.pool.clone(), channel.id()).await?; @@ -396,6 +391,7 @@ pub mod validator_message { use primitives::{ sentry::{SuccessResponse, ValidatorMessageResponse}, validator::MessageTypes, + ChainOf, }; use primitives::{Channel, DomainError, ValidatorId}; use serde::Deserialize; @@ -449,8 +445,9 @@ pub mod validator_message { let channel = req .extensions() - .get::() - .expect("Request should have Channel"); + .get::>() + .ok_or(ResponseError::NotFound)? + .context; let config_limit = app.config.msgs_find_limit as u64; let limit = query @@ -491,9 +488,9 @@ pub mod validator_message { let channel = req .extensions() - .get::() - .expect("Request should have Channel") - .to_owned(); + .get::>() + .ok_or(ResponseError::NotFound)? + .context; let into_body = req.into_body(); let body = hyper::body::to_bytes(into_body).await?; @@ -536,14 +533,17 @@ mod test { async fn create_and_fetch_spendable() { let app = setup_dummy_app().await; - let channel = DUMMY_CAMPAIGN.channel; + let (channel_context, channel) = { + let channel = DUMMY_CAMPAIGN.channel; + let channel_context = app + .config + .find_chain_token(DUMMY_CAMPAIGN.channel.token) + .expect("should retrieve Chain & token"); - let token_info = app - .config - .token_address_whitelist - .get(&channel.token) - .expect("should retrieve address"); - let precision: u8 = token_info.precision.into(); + (channel_context.with_channel(channel), channel) + }; + + let precision: u8 = channel_context.token.precision.into(); let deposit = Deposit { total: BigNum::from_str("100000000000000000000").expect("should convert"), // 100 DAI still_on_create2: BigNum::from_str("1000000000000000000").expect("should convert"), // 1 DAI @@ -559,9 +559,8 @@ mod test { // Call create_or_update_spendable let new_spendable = create_or_update_spendable_document( &app.adapter, - token_info, app.pool.clone(), - &channel, + &channel_context, ADDRESSES["creator"], ) .await @@ -599,9 +598,8 @@ mod test { let updated_spendable = create_or_update_spendable_document( &app.adapter, - token_info, app.pool.clone(), - &channel, + &channel_context, ADDRESSES["creator"], ) .await @@ -632,19 +630,24 @@ mod test { #[tokio::test] async fn get_accountings_for_channel() { let app = setup_dummy_app().await; - let channel = DUMMY_CAMPAIGN.channel; - insert_channel(&app.pool, channel) + let channel_context = app + .config + .find_chain_token(DUMMY_CAMPAIGN.channel.token) + .expect("Dummy channel Token should be present in config!") + .with(DUMMY_CAMPAIGN.channel); + + insert_channel(&app.pool, channel_context.context) .await .expect("should insert channel"); - let build_request = |channel: Channel| { + let build_request = |channel_context: &ChainOf| { Request::builder() - .extension(channel) + .extension(channel_context.clone()) .body(Body::empty()) .expect("Should build Request") }; // Testing for no accounting yet { - let res = get_accounting_for_channel(build_request(channel), &app) + let res = get_accounting_for_channel(build_request(&channel_context), &app) .await .expect("should get response"); assert_eq!(StatusCode::OK, res.status()); @@ -671,11 +674,15 @@ mod test { UnifiedNum::from_u64(100), ) .expect("Should not overflow"); - spend_amount(app.pool.clone(), channel.id(), balances.clone()) - .await - .expect("should spend"); + spend_amount( + app.pool.clone(), + channel_context.context.id(), + balances.clone(), + ) + .await + .expect("should spend"); - let res = get_accounting_for_channel(build_request(channel), &app) + let res = 
get_accounting_for_channel(build_request(&channel_context), &app) .await .expect("should get response"); assert_eq!(StatusCode::OK, res.status()); @@ -706,9 +713,12 @@ mod test { .await .expect("should spend"); - let res = get_accounting_for_channel(build_request(second_channel), &app) - .await - .expect("should get response"); + let res = get_accounting_for_channel( + build_request(&channel_context.clone().with(second_channel)), + &app, + ) + .await + .expect("should get response"); assert_eq!(StatusCode::OK, res.status()); let accounting_response = res_to_accounting_response(res).await; @@ -725,11 +735,11 @@ mod test { balances .spenders .insert(ADDRESSES["creator"], UnifiedNum::from_u64(200)); - spend_amount(app.pool.clone(), channel.id(), balances) + spend_amount(app.pool.clone(), channel_context.context.id(), balances) .await .expect("should spend"); - let res = get_accounting_for_channel(build_request(channel), &app).await; + let res = get_accounting_for_channel(build_request(&channel_context), &app).await; let expected = ResponseError::FailedValidation( "Earners sum is not equal to spenders sum for channel".to_string(), ); @@ -740,34 +750,41 @@ mod test { #[tokio::test] async fn adds_and_retrieves_spender_leaf() { let app = setup_dummy_app().await; - let channel = DUMMY_CAMPAIGN.channel; + let channel_context = app + .config + .find_chain_token(DUMMY_CAMPAIGN.channel.token) + .expect("Dummy channel Token should be present in config!") + .with(DUMMY_CAMPAIGN.channel); - insert_channel(&app.pool, channel) + insert_channel(&app.pool, channel_context.context) .await .expect("should insert channel"); - let get_accounting_request = |channel: Channel| { + let get_accounting_request = |channel_context: &ChainOf| { Request::builder() - .extension(channel) + .extension(channel_context.clone()) .body(Body::empty()) .expect("Should build Request") }; - let add_spender_request = |channel: Channel| { - let param = RouteParams(vec![channel.id().to_string(), CREATOR.to_string()]); + let add_spender_request = |channel_context: &ChainOf| { + let param = RouteParams(vec![ + channel_context.context.id().to_string(), + CREATOR.to_string(), + ]); Request::builder() - .extension(channel) + .extension(channel_context.clone()) .extension(param) .body(Body::empty()) .expect("Should build Request") }; // Calling with non existent accounting - let res = add_spender_leaf(add_spender_request(channel), &app) + let res = add_spender_leaf(add_spender_request(&channel_context), &app) .await .expect("Should add"); assert_eq!(StatusCode::OK, res.status()); - let res = get_accounting_for_channel(get_accounting_request(channel), &app) + let res = get_accounting_for_channel(get_accounting_request(&channel_context), &app) .await .expect("should get response"); assert_eq!(StatusCode::OK, res.status()); @@ -787,11 +804,15 @@ mod test { balances .spend(*ADVERTISER, *GUARDIAN, UnifiedNum::from_u64(100)) .expect("Should not overflow"); - spend_amount(app.pool.clone(), channel.id(), balances.clone()) - .await - .expect("should spend"); + spend_amount( + app.pool.clone(), + channel_context.context.id(), + balances.clone(), + ) + .await + .expect("should spend"); - let res = get_accounting_for_channel(get_accounting_request(channel), &app) + let res = get_accounting_for_channel(get_accounting_request(&channel_context), &app) .await .expect("should get response"); assert_eq!(StatusCode::OK, res.status()); @@ -800,12 +821,12 @@ mod test { assert_eq!(balances, accounting_response.balances); - let res = 
add_spender_leaf(add_spender_request(channel), &app)
+        let res = add_spender_leaf(add_spender_request(&channel_context), &app)
             .await
             .expect("Should add");
         assert_eq!(StatusCode::OK, res.status());
 
-        let res = get_accounting_for_channel(build_request(channel), &app)
+        let res = get_accounting_for_channel(get_accounting_request(&channel_context), &app)
             .await
             .expect("should get response");
         assert_eq!(StatusCode::OK, res.status());
diff --git a/test_harness/src/lib.rs b/test_harness/src/lib.rs
index 3acfe16e1..bff5fd9ea 100644
--- a/test_harness/src/lib.rs
+++ b/test_harness/src/lib.rs
@@ -7,7 +7,7 @@ use adapter::ethereum::{
     get_counterfactual_address,
     test_util::{
         deploy_outpace_contract, deploy_sweeper_contract, deploy_token_contract, mock_set_balance,
-        outpace_deposit, GANACHE_URL, MOCK_TOKEN_ABI,
+        outpace_deposit, GANACHE_INFO_1337, MOCK_TOKEN_ABI,
     },
     Options, OUTPACE_ABI, SWEEPER_ABI,
 };
@@ -18,38 +18,49 @@ use primitives::{
     config::GANACHE_CONFIG,
     test_util::{FOLLOWER, LEADER},
     util::ApiUrl,
-    Address, Config,
+    Address, Chain, Config,
 };
 use web3::{contract::Contract, transports::Http, types::H160, Web3};
 
 pub mod deposits;
 
 /// ganache-cli setup with deployed contracts using the snapshot directory
-/// Uses the [`GANACHE_CONFIG`] & [`GANACHE_URL`] statics to init the contracts
+/// NOTE: Currently the snapshot and test setup use a single Chain.
+///
+/// Uses Chain #1337 from the [`GANACHE_CONFIG`] static to init the contracts
 pub static SNAPSHOT_CONTRACTS: Lazy = Lazy::new(|| {
-    let web3 = Web3::new(Http::new(GANACHE_URL).expect("failed to init transport"));
+    let ganache_chain_info = GANACHE_INFO_1337.clone();
 
-    let (token_address, token_info) = GANACHE_CONFIG
-        .token_address_whitelist
-        .iter()
-        .next()
-        .expect("Shanpshot token should be included in Ganache config");
+    let web3 = Web3::new(
+        Http::new(ganache_chain_info.chain.rpc.as_str()).expect("failed to init transport"),
+    );
+
+    let token_info = ganache_chain_info
+        .tokens
+        .get("Mocked TOKEN")
+        .expect("Ganache config should contain the Mocked TOKEN for Chain #1337");
+    let chain = ganache_chain_info.chain.clone();
 
     let token = (
         // use Ganache Config
         token_info.clone(),
-        *token_address,
-        Contract::from_json(web3.eth(), H160(token_address.to_bytes()), &MOCK_TOKEN_ABI).unwrap(),
+        token_info.address,
+        Contract::from_json(
+            web3.eth(),
+            H160(token_info.address.to_bytes()),
+            &MOCK_TOKEN_ABI,
+        )
+        .unwrap(),
     );
 
-    let sweeper_address = Address::from(GANACHE_CONFIG.sweeper_address);
+    let sweeper_address = Address::from(ganache_chain_info.chain.sweeper);
 
     let sweeper = (
         sweeper_address,
         Contract::from_json(web3.eth(), H160(sweeper_address.to_bytes()), &SWEEPER_ABI).unwrap(),
    );
 
-    let outpace_address = Address::from(GANACHE_CONFIG.outpace_address);
+    let outpace_address = Address::from(ganache_chain_info.chain.outpace);
 
     let outpace = (
         outpace_address,
@@ -60,6 +71,7 @@ pub static SNAPSHOT_CONTRACTS: Lazy = Lazy::new(|| {
         token,
         sweeper,
         outpace,
+        chain,
     }
 });
 
@@ -128,7 +140,7 @@ pub static VALIDATORS: Lazy> = Lazy::new(|| {
 });
 
 pub struct Setup {
-    pub web3: Web3,
+    pub chain: Chain,
 }
 
 #[derive(Debug, Clone)]
@@ -136,22 +148,27 @@ pub struct Contracts {
     pub token: (TokenInfo, Address, Contract),
     pub sweeper: (Address, Contract),
     pub outpace: (Address, Contract),
+    pub chain: Chain,
 }
 
 impl Setup {
     pub async fn deploy_contracts(&self) -> Contracts {
+        let transport = Http::new(self.chain.rpc.as_str()).expect("Invalid RPC for chain!");
+
+        let web3 = Web3::new(transport);
+
         // deploy contracts
         // TOKEN
contract is with precision 18 (like DAI) // set the minimum token units to 1 TOKEN - let token = deploy_token_contract(&self.web3, 10_u64.pow(18)) + let token = deploy_token_contract(&web3, 10_u64.pow(18)) .await .expect("Correct parameters are passed to the Token constructor."); - let sweeper = deploy_sweeper_contract(&self.web3) + let sweeper = deploy_sweeper_contract(&web3) .await .expect("Correct parameters are passed to the Sweeper constructor."); - let outpace = deploy_outpace_contract(&self.web3) + let outpace = deploy_outpace_contract(&web3) .await .expect("Correct parameters are passed to the OUTPACE constructor."); @@ -159,6 +176,7 @@ impl Setup { token, sweeper, outpace, + chain: self.chain.clone(), } } @@ -207,7 +225,7 @@ mod tests { use crate::run::run_sentry_app; use super::*; - use adapter::ethereum::test_util::{GANACHE_URL, KEYSTORES}; + use adapter::ethereum::test_util::{GANACHE_1337, KEYSTORES}; use adapter::{prelude::*, Adapter, Ethereum}; use primitives::{ balances::CheckedState, @@ -215,16 +233,17 @@ mod tests { spender::Spender, test_util::{ADVERTISER, DUMMY_AD_UNITS, DUMMY_IPFS, GUARDIAN, GUARDIAN_2, PUBLISHER}, util::{logging::new_logger, ApiUrl}, - Balances, BigNum, Campaign, CampaignId, Channel, ChannelId, UnifiedNum, + Balances, BigNum, Campaign, CampaignId, ChainOf, Channel, ChannelId, UnifiedNum, }; use reqwest::{Client, StatusCode}; - use validator_worker::{sentry_interface::Validator, worker::Worker, SentryApi}; + use validator_worker::{worker::Worker, SentryApi}; #[tokio::test] #[ignore = "We use a snapshot, however, we have left this test for convenience"] async fn deploy_contracts() { - let web3 = Web3::new(Http::new(GANACHE_URL).expect("failed to init transport")); - let setup = Setup { web3 }; + let setup = Setup { + chain: GANACHE_1337.clone(), + }; // deploy contracts let _contracts = setup.deploy_contracts().await; } @@ -388,8 +407,20 @@ mod tests { #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn run_full_test() { - let web3 = Web3::new(Http::new(GANACHE_URL).expect("failed to init transport")); - let setup = Setup { web3 }; + // for now we are running tests only on a single Chain! 
+ // It is safe to use a single ChainOf for both Campaigns + let chain = GANACHE_1337.clone(); + assert_eq!(CAMPAIGN_1.channel.token, CAMPAIGN_2.channel.token); + + let token_chain = GANACHE_CONFIG + .find_chain_token(CAMPAIGN_1.channel.token) + .expect("Should find CAMPAIGN_1 channel token address in Config!"); + + assert_eq!(&token_chain.chain, &chain, "CAMPAIGN_1 & CAMPAIGN_2 should be both using the same Chain which is setup in the Ganache Config"); + let setup = Setup { + chain: chain.clone(), + }; + // Use snapshot contracts let contracts = SNAPSHOT_CONTRACTS.clone(); // let contracts = setup.deploy_contracts().await; @@ -418,43 +449,21 @@ mod tests { .unlock() .expect("Failed to unlock Follower ethereum adapter"); - let leader_sentry = { - // should get self Auth from Leader's EthereumAdapter - let leader_auth = leader_adapter - .get_auth(leader_adapter.whoami()) - .expect("Get authentication"); - let whoami_validator = Validator { - url: leader.sentry_url.clone(), - token: leader_auth, - }; - - SentryApi::new( - leader_adapter.clone(), - new_logger(&leader.worker_logger_prefix), - leader.config.clone(), - whoami_validator, - ) - .expect("Should create new SentryApi for the Leader Worker") - }; - - let follower_sentry = { - // should get self Auth from Follower's EthereumAdapter - let follower_auth = follower_adapter - .get_auth(follower_adapter.whoami()) - .expect("Get authentication"); - let whoami_validator = Validator { - url: follower.sentry_url.clone(), - token: follower_auth, - }; + let leader_sentry = SentryApi::new( + leader_adapter.clone(), + new_logger(&leader.worker_logger_prefix), + leader.config.clone(), + leader.sentry_url.clone(), + ) + .expect("Should create new SentryApi for the Leader Worker"); - SentryApi::new( - follower_adapter.clone(), - new_logger(&follower.worker_logger_prefix), - follower.config.clone(), - whoami_validator, - ) - .expect("Should create new SentryApi for the Leader Worker") - }; + let follower_sentry = SentryApi::new( + follower_adapter.clone(), + new_logger(&follower.worker_logger_prefix), + follower.config.clone(), + follower.sentry_url.clone(), + ) + .expect("Should create new SentryApi for the Leader Worker"); // check Campaign Leader & Follower urls // they should be the same as the test validators @@ -507,7 +516,7 @@ mod tests { // make sure we have the expected deposit returned from EthereumAdapter let eth_deposit = leader_adapter .get_deposit( - &CAMPAIGN_1.channel, + &token_chain.clone().with_channel(CAMPAIGN_1.channel), advertiser_adapter.whoami().to_address(), ) .await @@ -523,7 +532,7 @@ mod tests { // make sure we have the expected deposit returned from EthereumAdapter let eth_deposit = leader_adapter .get_deposit( - &CAMPAIGN_2.channel, + &token_chain.clone().with_channel(CAMPAIGN_2.channel), advertiser_adapter.whoami().to_address(), ) .await @@ -539,7 +548,7 @@ mod tests { // GET /v5/channel/{}/spender/all { let leader_auth = advertiser_adapter - .get_auth(leader_adapter.whoami()) + .get_auth(chain.chain_id, leader_adapter.whoami()) .expect("Get authentication"); let leader_response = get_spender_all_page_0( @@ -560,7 +569,7 @@ mod tests { // POST /v5/campaign { let leader_auth = advertiser_adapter - .get_auth(leader_adapter.whoami()) + .get_auth(chain.chain_id, leader_adapter.whoami()) .expect("Get authentication"); let mut no_budget_campaign = CreateCampaign::from_campaign(CAMPAIGN_1.clone()); @@ -595,7 +604,7 @@ mod tests { // GET /v5/channel/{}/spender/all { let leader_response = leader_sentry - 
.get_all_spenders(CAMPAIGN_1.channel.id()) + .get_all_spenders(&token_chain.clone().with_channel(CAMPAIGN_1.channel)) .await .expect("Should return Response"); @@ -620,7 +629,7 @@ mod tests { let create_campaign_1 = CreateCampaign::from_campaign(CAMPAIGN_1.clone()); { let leader_token = advertiser_adapter - .get_auth(leader_adapter.whoami()) + .get_auth(chain.chain_id, leader_adapter.whoami()) .expect("Get authentication"); let leader_response = create_campaign( @@ -637,7 +646,7 @@ mod tests { { let follower_token = advertiser_adapter - .get_auth(follower_adapter.whoami()) + .get_auth(chain.chain_id, follower_adapter.whoami()) .expect("Get authentication"); let follower_response = create_campaign( @@ -662,7 +671,7 @@ mod tests { { let leader_token = advertiser_adapter - .get_auth(leader_adapter.whoami()) + .get_auth(chain.chain_id, leader_adapter.whoami()) .expect("Get authentication"); let leader_response = create_campaign( @@ -680,7 +689,7 @@ mod tests { { let follower_token = advertiser_adapter - .get_auth(follower_adapter.whoami()) + .get_auth(token_chain.chain.chain_id, follower_adapter.whoami()) .expect("Get authentication"); let follower_response = create_campaign( @@ -710,7 +719,7 @@ mod tests { balances: Balances::::new(), }; let actual_accounting = leader_sentry - .get_accounting(CAMPAIGN_1.channel.id()) + .get_accounting(&token_chain.clone().with_channel(CAMPAIGN_1.channel)) .await .expect("Should get Channel Accounting"); @@ -746,9 +755,13 @@ mod tests { }, ]; - let response = post_new_events(&leader_sentry, CAMPAIGN_1.id, &events) - .await - .expect("Posted events"); + let response = post_new_events( + &leader_sentry, + token_chain.clone().with(CAMPAIGN_1.id), + &events, + ) + .await + .expect("Posted events"); assert_eq!(SuccessResponse { success: true }, response) } @@ -798,7 +811,7 @@ mod tests { }; let actual_accounting = leader_sentry - .get_accounting(CAMPAIGN_1.channel.id()) + .get_accounting(&token_chain.with_channel(CAMPAIGN_1.channel)) .await .expect("Should get Channel Accounting"); @@ -842,24 +855,27 @@ mod tests { /// Asserts: [`StatusCode::OK`] async fn post_new_events( sentry: &SentryApi, - campaign: CampaignId, + campaign_context: ChainOf, events: &[Event], ) -> anyhow::Result { let endpoint_url = sentry - .whoami - .url - .join(&format!("v5/campaign/{}/events", campaign)) + .sentry_url + .join(&format!("v5/campaign/{}/events", campaign_context.context)) .expect("valid endpoint"); let request_body = vec![("events".to_string(), events)] .into_iter() .collect::>(); + let auth_token = sentry + .adapter + .get_auth(campaign_context.chain.chain_id, sentry.adapter.whoami())?; + let response = sentry .client .post(endpoint_url) .json(&request_body) - .bearer_auth(&sentry.whoami.token) + .bearer_auth(&auth_token) .send() .await?; diff --git a/validator_worker/src/channel.rs b/validator_worker/src/channel.rs index ae9144bda..33639ef10 100644 --- a/validator_worker/src/channel.rs +++ b/validator_worker/src/channel.rs @@ -4,7 +4,7 @@ use crate::{ }; use adapter::prelude::*; -use primitives::{config::Config, Channel, ChannelId}; +use primitives::{config::Config, ChainOf, Channel, ChannelId}; use slog::info; use std::time::Duration; use tokio::time::timeout; @@ -12,22 +12,24 @@ use tokio::time::timeout; pub async fn channel_tick( sentry: &SentryApi, config: &Config, - channel: Channel, + channel_context: ChainOf, ) -> Result<(ChannelId, Box), Error> { let logger = sentry.logger.clone(); + let channel = channel_context.context; let adapter = &sentry.adapter; - let tick = 
channel + let tick = channel_context + .context .find_validator(adapter.whoami()) .ok_or(Error::ChannelNotIntendedForUs)?; // 1. `GET /channel/:id/spender/all` - let all_spenders = sentry.get_all_spenders(channel.id()).await?; + let all_spenders = sentry.get_all_spenders(&channel_context).await?; // 2. `GET /channel/:id/accounting` // Validation #1: // sum(Accounting.spenders) == sum(Accounting.earners) - let accounting = sentry.get_accounting(channel.id()).await?; + let accounting = sentry.get_accounting(&channel_context).await?; // Validation #2: // spender.total_deposit >= accounting.balances.spenders[spender.address] @@ -43,17 +45,12 @@ pub async fn channel_tick( return Err(Error::Validation); } - let token = config - .token_address_whitelist - .get(&channel.token) - .ok_or(Error::ChannelTokenNotWhitelisted)?; - let duration = Duration::from_millis(config.channel_tick_timeout as u64); match tick { primitives::Validator::Leader(_v) => match timeout( duration, - leader::tick(sentry, channel, accounting.balances, token), + leader::tick(sentry, &channel_context, accounting.balances), ) .await { @@ -72,7 +69,7 @@ pub async fn channel_tick( }, primitives::Validator::Follower(_v) => { let follower_fut = - follower::tick(sentry, channel, all_spenders, accounting.balances, token); + follower::tick(sentry, &channel_context, all_spenders, accounting.balances); match timeout(duration, follower_fut).await { Err(timeout_e) => Err(Error::FollowerTick( channel.id(), diff --git a/validator_worker/src/follower.rs b/validator_worker/src/follower.rs index fe30e84b2..10d328434 100644 --- a/validator_worker/src/follower.rs +++ b/validator_worker/src/follower.rs @@ -4,10 +4,9 @@ use adapter::{prelude::*, Error as AdapterError}; use primitives::{ balances, balances::{Balances, CheckedState, UncheckedState}, - config::TokenInfo, spender::Spender, validator::{ApproveState, MessageTypes, NewState, RejectState}, - Address, Channel, UnifiedNum, + Address, ChainOf, Channel, UnifiedNum, }; use crate::{ @@ -85,13 +84,12 @@ pub struct TickStatus { pub async fn tick( sentry: &SentryApi, - channel: Channel, + channel_context: &ChainOf, all_spenders: HashMap, accounting_balances: Balances, - token: &TokenInfo, ) -> Result { - let from = channel.leader; - let channel_id = channel.id(); + let from = channel_context.context.leader; + let channel_id = channel_context.context.id(); // TODO: Context for All spender sum Error when overflow occurs let all_spenders_sum = all_spenders @@ -126,10 +124,9 @@ pub async fn tick( let approve_state_result = if let (Some(new_state), false) = (new_msg, latest_is_responded_to) { on_new_state( sentry, - channel, + channel_context, accounting_balances, new_state, - token, all_spenders_sum, ) .await? @@ -138,24 +135,31 @@ pub async fn tick( }; Ok(TickStatus { - heartbeat: heartbeat(sentry, channel).await?, + heartbeat: heartbeat(sentry, channel_context).await?, approve_state: approve_state_result, }) } async fn on_new_state<'a, C: Unlocked + 'static>( sentry: &'a SentryApi, - channel: Channel, + channel_context: &'a ChainOf, accounting_balances: Balances, new_state: NewState, - token_info: &TokenInfo, all_spenders_sum: UnifiedNum, ) -> Result { + let channel = channel_context.context; + let proposed_balances = match new_state.balances.clone().check() { Ok(balances) => balances, // TODO: Should we show the Payout Mismatch between Spent & Earned? Err(balances::Error::PayoutMismatch { .. 
}) => { - return Ok(on_error(sentry, channel, new_state, InvalidNewState::Transition).await) + return on_error( + sentry, + channel_context, + new_state, + InvalidNewState::Transition, + ) + .await; } // TODO: Add context for `proposed_balances.check()` overflow error Err(_) => return Err(Error::Overflow), @@ -163,15 +167,29 @@ async fn on_new_state<'a, C: Unlocked + 'static>( let proposed_state_root = new_state.state_root.clone(); - if proposed_state_root != proposed_balances.encode(channel.id(), token_info.precision.get())? { - return Ok(on_error(sentry, channel, new_state, InvalidNewState::RootHash).await); + if proposed_state_root + != proposed_balances.encode(channel.id(), channel_context.token.precision.get())? + { + return on_error( + sentry, + channel_context, + new_state, + InvalidNewState::RootHash, + ) + .await; } if !sentry .adapter .verify(channel.leader, &proposed_state_root, &new_state.signature)? { - return Ok(on_error(sentry, channel, new_state, InvalidNewState::Signature).await); + return on_error( + sentry, + channel_context, + new_state, + InvalidNewState::Signature, + ) + .await; } let last_approve_response = sentry.get_last_approved(channel.id()).await?; @@ -185,7 +203,13 @@ async fn on_new_state<'a, C: Unlocked + 'static>( Ok(None) => Default::default(), // TODO: Add Context for Transition error Err(_err) => { - return Ok(on_error(sentry, channel, new_state, InvalidNewState::Transition).await) + return on_error( + sentry, + channel_context, + new_state, + InvalidNewState::Transition, + ) + .await; } }; @@ -202,7 +226,13 @@ async fn on_new_state<'a, C: Unlocked + 'static>( .ok_or(Error::Overflow)? { // TODO: Add context for error in Spenders transition - return Ok(on_error(sentry, channel, new_state, InvalidNewState::Transition).await); + return on_error( + sentry, + channel_context, + new_state, + InvalidNewState::Transition, + ) + .await; } // 2. Check the transition of previous and proposed Earners maps @@ -218,7 +248,13 @@ async fn on_new_state<'a, C: Unlocked + 'static>( .ok_or(Error::Overflow)? 
{ // TODO: Add context for error in Earners transition - return Ok(on_error(sentry, channel, new_state, InvalidNewState::Transition).await); + return on_error( + sentry, + channel_context, + new_state, + InvalidNewState::Transition, + ) + .await; } let health_earners = get_health( @@ -228,13 +264,13 @@ async fn on_new_state<'a, C: Unlocked + 'static>( ) .ok_or(Error::Overflow)?; if health_earners < u64::from(sentry.config.health_unsignable_promilles) { - return Ok(on_error( + return on_error( sentry, - channel, + channel_context, new_state, InvalidNewState::Health(Health::Earners(health_earners)), ) - .await); + .await; } let health_spenders = get_health( @@ -244,13 +280,13 @@ async fn on_new_state<'a, C: Unlocked + 'static>( ) .ok_or(Error::Overflow)?; if health_spenders < u64::from(sentry.config.health_unsignable_promilles) { - return Ok(on_error( + return on_error( sentry, - channel, + channel_context, new_state, InvalidNewState::Health(Health::Spenders(health_spenders)), ) - .await); + .await; } let signature = sentry.adapter.sign(&new_state.state_root)?; @@ -259,27 +295,27 @@ async fn on_new_state<'a, C: Unlocked + 'static>( let propagation_result = sentry .propagate( - channel, + channel_context, &[&MessageTypes::ApproveState(ApproveState { state_root: proposed_state_root, signature, is_healthy, })], ) - .await; + .await?; Ok(ApproveStateResult::Sent(Some(propagation_result))) } async fn on_error<'a, C: Unlocked + 'static>( sentry: &'a SentryApi, - channel: Channel, + channel_context: &ChainOf, new_state: NewState, status: InvalidNewState, -) -> ApproveStateResult { +) -> Result { let propagation = sentry .propagate( - channel, + channel_context, &[&MessageTypes::RejectState(RejectState { reason: status.to_string(), state_root: new_state.state_root.clone(), @@ -289,11 +325,11 @@ async fn on_error<'a, C: Unlocked + 'static>( timestamp: Some(Utc::now()), })], ) - .await; + .await?; - ApproveStateResult::RejectedState { + Ok(ApproveStateResult::RejectedState { reason: status, state_root: new_state.state_root.clone(), propagation, - } + }) } diff --git a/validator_worker/src/heartbeat.rs b/validator_worker/src/heartbeat.rs index 19dee87e7..87cb5578b 100644 --- a/validator_worker/src/heartbeat.rs +++ b/validator_worker/src/heartbeat.rs @@ -5,7 +5,7 @@ use byteorder::{BigEndian, ByteOrder}; use primitives::{ merkle_tree::MerkleTree, validator::{Heartbeat, MessageTypes}, - Channel, + ChainOf, Channel, }; use thiserror::Error; @@ -25,9 +25,11 @@ pub enum Error { pub async fn heartbeat( iface: &SentryApi, - channel: Channel, + channel_context: &ChainOf, ) -> Result { - let validator_message_response = iface.get_our_latest_msg(channel.id(), &["Heartbeat"]).await?; + let validator_message_response = iface + .get_our_latest_msg(channel_context.context.id(), &["Heartbeat"]) + .await?; let heartbeat_msg = match validator_message_response { Some(MessageTypes::Heartbeat(heartbeat)) => Some(heartbeat), _ => None, @@ -39,7 +41,7 @@ pub async fn heartbeat( }); if should_send { - Ok(Some(send_heartbeat(iface, channel).await?)) + Ok(Some(send_heartbeat(iface, channel_context).await?)) } else { Ok(None) } @@ -47,7 +49,7 @@ pub async fn heartbeat( async fn send_heartbeat( iface: &SentryApi, - channel: Channel, + channel_context: &ChainOf, ) -> Result, Error> { let mut timestamp_buf = [0_u8; 32]; let milliseconds: u64 = u64::try_from(Utc::now().timestamp_millis()) @@ -56,7 +58,8 @@ async fn send_heartbeat( let merkle_tree = MerkleTree::new(&[timestamp_buf])?; - let state_root_raw = 
get_signable_state_root(channel.id().as_ref(), &merkle_tree.root()); + let state_root_raw = + get_signable_state_root(channel_context.context.id().as_ref(), &merkle_tree.root()); let state_root = hex::encode(state_root_raw); let signature = iface.adapter.sign(&state_root)?; @@ -67,5 +70,5 @@ async fn send_heartbeat( timestamp: Utc::now(), }); - Ok(iface.propagate(channel, &[&message_types]).await) + Ok(iface.propagate(channel_context, &[&message_types]).await?) } diff --git a/validator_worker/src/leader.rs b/validator_worker/src/leader.rs index 80fbd1b45..6ae0382aa 100644 --- a/validator_worker/src/leader.rs +++ b/validator_worker/src/leader.rs @@ -3,9 +3,8 @@ use thiserror::Error; use adapter::{prelude::*, Error as AdapterError}; use primitives::{ balances::CheckedState, - config::TokenInfo, validator::{MessageError, MessageTypes, NewState}, - Balances, Channel, + Balances, ChainOf, Channel, }; use crate::{ @@ -39,10 +38,11 @@ pub enum Error { pub async fn tick( sentry: &SentryApi, - channel: Channel, + channel_context: &ChainOf, accounting_balances: Balances, - token: &TokenInfo, ) -> Result { + let channel = channel_context.context; + // Check if Accounting != than latest NewState (Accounting.balances != NewState.balances) let should_generate_new_state = { @@ -91,37 +91,39 @@ pub async fn tick( // Create a `NewState` if balances have changed let new_state = if should_generate_new_state { - Some(on_new_accounting(sentry, channel, accounting_balances, token).await?) + Some(on_new_accounting(sentry, channel_context, accounting_balances).await?) } else { None }; Ok(TickStatus { - heartbeat: heartbeat(sentry, channel).await?, + heartbeat: heartbeat(sentry, channel_context).await?, new_state, }) } async fn on_new_accounting( sentry: &SentryApi, - channel: Channel, + channel_context: &ChainOf, accounting_balances: Balances, - token: &TokenInfo, ) -> Result, Error> { - let state_root = accounting_balances.encode(channel.id(), token.precision.get())?; + let state_root = accounting_balances.encode( + channel_context.context.id(), + channel_context.token.precision.get(), + )?; let signature = sentry.adapter.sign(&state_root)?; let propagation_results = sentry .propagate( - channel, + channel_context, &[&MessageTypes::NewState(NewState { state_root, signature, balances: accounting_balances.into_unchecked(), })], ) - .await; + .await?; Ok(propagation_results) } diff --git a/validator_worker/src/lib.rs b/validator_worker/src/lib.rs index 74e24f1df..3276ac072 100644 --- a/validator_worker/src/lib.rs +++ b/validator_worker/src/lib.rs @@ -75,12 +75,21 @@ fn get_state_root_hash( mod test { use super::*; - use primitives::util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_CHANNEL_ID}; + use primitives::{channel::Nonce, test_util::IDS, util::tests::prep_db::ADDRESSES, Channel}; #[test] // TODO: Double check this test - encoded value! 
after introducing `spenders` ("spender", address, amount) fn get_state_root_hash_returns_correct_hash() { - let channel = DUMMY_CAMPAIGN.channel; + let channel = Channel { + leader: IDS["leader"], + follower: IDS["follower"], + guardian: IDS["tester"].to_address(), + // DAI on goerli + token: "0x73967c6a0904aa032c103b4104747e88c566b1a2" + .parse() + .expect("Valid DAI token address"), + nonce: Nonce::from(987_654_321_u32), + }; let mut balances = Balances::::default(); @@ -103,7 +112,9 @@ mod test { /// we re-use it in order to double check if we haven't change anything with the `get_state_root_hash()` changes /// when we introduced `spenders` `("spender", address, amount)` & `UnifiedNum` fn get_state_root_hash_returns_correct_hash_for_added_address_to_spenders() { - let channel = *DUMMY_CHANNEL_ID; + let channel = "061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088" + .parse() + .expect("Valid ChannelId"); let mut balances = Balances::::default(); balances.add_earner(ADDRESSES["publisher"]); diff --git a/validator_worker/src/main.rs b/validator_worker/src/main.rs index 10556babb..e3bfbeebc 100644 --- a/validator_worker/src/main.rs +++ b/validator_worker/src/main.rs @@ -5,14 +5,14 @@ use std::error::Error; use clap::{crate_version, App, Arg}; -use adapter::{prelude::*, primitives::AdapterTypes, Adapter, Dummy, Ethereum}; +use adapter::{primitives::AdapterTypes, Adapter, Dummy, Ethereum}; use primitives::{ config::{configuration, Environment}, test_util::DUMMY_AUTH, util::logging::new_logger, ValidatorId, }; -use validator_worker::{sentry_interface::Validator, SentryApi, Worker}; +use validator_worker::{SentryApi, Worker}; fn main() -> Result<(), Box> { let cli = App::new("Validator worker") @@ -117,27 +117,13 @@ fn main() -> Result<(), Box> { match unlocked_adapter { AdapterTypes::Ethereum(eth_adapter) => { - let whoami = Validator { - url: sentry_url, - token: eth_adapter - .get_auth(eth_adapter.whoami()) - .expect("Failed to get Authentication token for Who am I"), - }; - - let sentry = SentryApi::new(*eth_adapter, logger.clone(), config, whoami) + let sentry = SentryApi::new(*eth_adapter, logger.clone(), config, sentry_url) .expect("Should create the SentryApi"); Worker::from_sentry(sentry).run(is_single_tick) } AdapterTypes::Dummy(dummy_adapter) => { - let whoami = Validator { - url: sentry_url, - token: dummy_adapter - .get_auth(dummy_adapter.whoami()) - .expect("Failed to get Authentication token for Who am I"), - }; - - let sentry = SentryApi::new(*dummy_adapter, logger.clone(), config, whoami) + let sentry = SentryApi::new(*dummy_adapter, logger.clone(), config, sentry_url) .expect("Should create the SentryApi"); Worker::from_sentry(sentry).run(is_single_tick) diff --git a/validator_worker/src/sentry_interface.rs b/validator_worker/src/sentry_interface.rs index ca3b981d9..c558c4f40 100644 --- a/validator_worker/src/sentry_interface.rs +++ b/validator_worker/src/sentry_interface.rs @@ -17,12 +17,14 @@ use primitives::{ spender::Spender, util::ApiUrl, validator::MessageTypes, - Address, Channel, ChannelId, Config, ValidatorId, + Address, ChainId, ChainOf, Channel, ChannelId, Config, ValidatorId, }; use thiserror::Error; pub type PropagationResult = Result; +pub type ChainsValidators = HashMap; /// Propagate the Validator messages to these `Validator`s +/// This map contains the Validator Auth token & Url for a specific Chain pub type Validators = HashMap; pub type AuthToken = String; @@ -43,19 +45,39 @@ pub enum Error { /// Error returned when the passed [`Validators`] 
to [`SentryApi::init()`] do not contain
    /// the _Who am I_ a record of the [`Adapter::whoami()`]
    #[error(
-        "Missing validator URL & Auth token entry for whoami {whoami:#?} in the propagation list"
+        "Missing validator URL & Auth token entry for whoami {whoami:#?} on chain {chain_id:#?} in the propagation list"
    )]
-    WhoamiMissing { whoami: ValidatorId },
+    WhoamiMissing {
+        whoami: ValidatorId,
+        chain_id: ChainId,
+    },
+    #[error("We can propagate only to Chains which are whitelisted for this validator.")]
+    ChainNotWhitelisted { chain_id: ChainId },
+    #[error("Failed to generate authentication token using the Adapter for {for_chain:?}")]
+    AuthenticationToken { for_chain: ChainId },
+    #[error("Not all channel validators were found in the propagation list")]
+    PropagationValidatorsNotFound {
+        channel: Vec,
+        found: HashMap,
+    },
 }
 
 #[derive(Debug)]
-pub struct SentryApi {
+pub struct SentryApi {
     pub adapter: Adapter,
     pub client: Client,
     pub logger: Logger,
     pub config: Config,
-    pub whoami: Validator,
-    /// If set with [`Validators`], `propagate_to` should contain the `whoami` [`Validator`].
+    /// Default URL for all the calls that do not have information about the Chains
+    pub sentry_url: ApiUrl,
+    /// Whitelisted chains on which this validator (_Who Am I_) can operate.
+    ///
+    /// Since the validator might have different URLs for old vs new Campaigns,
+    /// we can override the URL based on the campaign, see [`crate::Worker`].
+    /// The Auth token for this validator is generated for each Chain in [`SentryApi::new`].
+    pub whoami: HashMap,
+    /// If set with [`Validators`], `propagate_to` should contain the `whoami` [`Validator`] in each Chain!
+    /// Use [`SentryApi::init`] or [`SentryApi::with_propagate`] instead.
     pub propagate_to: P,
 }
 
@@ -66,6 +88,7 @@ impl Clone for SentryApi {
             client: self.client.clone(),
             logger: self.logger.clone(),
             config: self.config.clone(),
+            sentry_url: self.sentry_url.clone(),
             whoami: self.whoami.clone(),
             propagate_to: self.propagate_to.clone(),
         }
@@ -73,63 +96,98 @@ impl SentryApi {
 }
 
 impl SentryApi {
+    /// `sentry_url` is the default URL to which the current _Who am I_ validator should make requests.
+    /// It is used to populate the config Chains with an Authentication Token & [`ApiUrl`].
+    /// This value can be overwritten using `propagate_to`,
+    /// if any of the passed validators has the same [`ValidatorId`].
     pub fn new(
         adapter: Adapter,
         logger: Logger,
         config: Config,
-        whoami_validator: Validator,
+        sentry_url: ApiUrl,
     ) -> Result, Error> {
         let client = Client::builder()
             .timeout(Duration::from_millis(config.fetch_timeout.into()))
             .build()
             .map_err(Error::BuildingClient)?;
 
+        let whoami = config
+            .chains
+            .values()
+            .map(
+                |chain_info| match adapter.get_auth(chain_info.chain.chain_id, adapter.whoami()) {
+                    Ok(auth_token) => {
+                        let validator = Validator {
+                            url: sentry_url.clone(),
+                            token: auth_token,
+                        };
+
+                        Ok((chain_info.chain.chain_id, validator))
+                    }
+                    Err(_adapter_err) => Err(Error::AuthenticationToken {
+                        for_chain: chain_info.chain.chain_id,
+                    }),
+                },
+            )
+            .collect::, _>>()?;
+
         Ok(SentryApi {
             adapter,
             client,
             logger,
             config,
-            whoami: whoami_validator,
+            sentry_url,
+            whoami,
             propagate_to: (),
         })
     }
 
-    /// Initialize the [`SentryApi`] and makes sure that [`Adapter::whoami()`] is present in [`Validators`].
-    /// Sets the _Who am I_ [`ApiUrl`] and the Authentication Token for calls requiring authentication.
+    /// Initialize the [`SentryApi`] and makes sure that [`Adapter::whoami()`] is present in each chain's [`Validators`].
+    /// Sets the _Who am I_ [`ApiUrl`] and the Authentication Token for a specific Chain for calls that require authentication.
     pub fn init(
         adapter: Adapter,
         logger: Logger,
         config: Config,
-        propagate_to: Validators,
-    ) -> Result, Error> {
-        let whoami = propagate_to
-            .get(&adapter.whoami())
-            .cloned()
-            .ok_or_else(|| Error::WhoamiMissing {
-                whoami: adapter.whoami(),
-            })?;
+        sentry_url: ApiUrl,
+        propagate_to: ChainsValidators,
+    ) -> Result, Error> {
+        let sentry_api = SentryApi::new(adapter, logger, config, sentry_url)?;
 
-        let sentry_api = SentryApi::new(adapter, logger, config, whoami)?;
-
-        Ok(sentry_api.with_propagate(propagate_to))
+        sentry_api.with_propagate(propagate_to)
     }
 
-    /// If the _Who am I_ Validator is not found in `propagate_to` it will add it.
+    /// If the _Who am I_ Validator is not found in `propagate_to` it will be added.
     /// Propagation should happen to all validators Sentry instances including _Who am I_
-    /// i.e. the current validator
-    pub fn with_propagate(self, mut propagate_to: Validators) -> SentryApi {
-        let _ = propagate_to
-            .entry(self.adapter.whoami())
-            .or_insert_with(|| self.whoami.clone());
+    /// i.e. the current validator.
+    /// If a Chain in propagate_to is not set up ([`SentryApi::whoami`]) for this instance, an error is returned.
+    pub fn with_propagate(
+        self,
+        mut propagate_to: ChainsValidators,
+    ) -> Result, Error> {
+        for (chain_id, validators) in propagate_to.iter_mut() {
+            // validate that the chain is whitelisted
+            let whoami_validator = self
+                .whoami
+                .get(chain_id)
+                .ok_or(Error::ChainNotWhitelisted {
+                    chain_id: *chain_id,
+                })?;
+
+            // if _Who Am I_ is not found, insert it from the Chains set up in `whoami`
+            validators
+                .entry(self.adapter.whoami())
+                .or_insert_with(|| whoami_validator.clone());
+        }
 
-        SentryApi {
+        Ok(SentryApi {
             adapter: self.adapter,
             client: self.client,
             logger: self.logger,
             config: self.config,
+            sentry_url: self.sentry_url,
             whoami: self.whoami,
             propagate_to,
-        }
+        })
     }
 }
 
@@ -143,8 +201,7 @@ impl SentryApi {
         let message_type = message_types.join("+");
 
         let endpoint = self
-            .whoami
-            .url
+            .sentry_url
             .join(&format!(
                 "v5/channel/{}/validator-messages/{}/{}?limit=1",
                 channel, from, message_type
@@ -178,8 +235,7 @@ impl SentryApi {
     ) -> Result, Error> {
         self.client
             .get(
-                self.whoami
-                    .url
+                self.sentry_url
                     .join(&format!(
                         "v5/channel/{}/last-approved?withHeartbeat=true",
                         channel
@@ -196,18 +252,28 @@ impl SentryApi {
     /// page always starts from 0
     pub async fn get_spenders_page(
         &self,
-        channel: &ChannelId,
+        channel_context: &ChainOf,
         page: u64,
     ) -> Result {
+        let channel_id = channel_context.context.id();
         let url = self
-            .whoami
-            .url
-            .join(&format!("v5/channel/{}/spender/all?page={}", channel, page))
+            .sentry_url
+            .join(&format!(
+                "v5/channel/{}/spender/all?page={}",
+                channel_id, page
+            ))
             .expect("Should not error when creating endpoint");
 
+        let auth_token = self
+            .adapter
+            .get_auth(channel_context.chain.chain_id, self.adapter.whoami())
+            .map_err(|_adapter_err| Error::AuthenticationToken {
+                for_chain: channel_context.chain.chain_id,
+            })?;
+
         self.client
             .get(url)
-            .bearer_auth(&self.whoami.token)
+            .bearer_auth(&auth_token)
             .send()
             .await?
.json() @@ -217,15 +283,16 @@ impl SentryApi { pub async fn get_all_spenders( &self, - channel: ChannelId, + channel_context: &ChainOf, ) -> Result, Error> { - let first_page = self.get_spenders_page(&channel, 0).await?; + let first_page = self.get_spenders_page(channel_context, 0).await?; if first_page.pagination.total_pages < 2 { Ok(first_page.spenders) } else { let all: Vec = try_join_all( - (1..first_page.pagination.total_pages).map(|i| self.get_spenders_page(&channel, i)), + (1..first_page.pagination.total_pages) + .map(|i| self.get_spenders_page(channel_context, i)), ) .await?; @@ -242,20 +309,24 @@ impl SentryApi { /// `Balances` should always be in `CheckedState` pub async fn get_accounting( &self, - channel: ChannelId, + channel_context: &ChainOf, ) -> Result, Error> { let url = self - .whoami - .url - .join(&format!("v5/channel/{}/accounting", channel)) + .sentry_url + .join(&format!( + "v5/channel/{}/accounting", + channel_context.context.id() + )) .expect("Should not error when creating endpoint"); - let response = self - .client - .get(url) - .bearer_auth(&self.whoami.token) - .send() - .await?; + let auth_token = self + .adapter + .get_auth(channel_context.chain.chain_id, self.adapter.whoami()) + .map_err(|_adapter_err| Error::AuthenticationToken { + for_chain: channel_context.chain.chain_id, + })?; + + let response = self.client.get(url).bearer_auth(auth_token).send().await?; assert_eq!(reqwest::StatusCode::OK, response.status()); @@ -267,27 +338,36 @@ impl SentryApi { /// Fetches all `Campaign`s from the _Who am I_ Sentry. /// It builds the `Channel`s to be processed alongside all the `Validator`s' url & auth token. - pub async fn collect_channels(&self) -> Result<(HashSet, Validators), Error> { + pub async fn collect_channels( + &self, + ) -> Result<(HashSet>, ChainsValidators), Error> { let all_campaigns_timeout = Duration::from_millis(self.config.all_campaigns_timeout as u64); let client = reqwest::Client::builder() .timeout(all_campaigns_timeout) .build()?; let campaigns = - campaigns::all_campaigns(client, &self.whoami, Some(self.adapter.whoami())).await?; - let channels = campaigns - .iter() - .map(|campaign| campaign.channel) - .collect::>(); + campaigns::all_campaigns(client, &self.sentry_url, Some(self.adapter.whoami())).await?; + + let (validators, channels) = campaigns.into_iter().fold( + (ChainsValidators::new(), HashSet::>::new()), + |(mut validators, mut channels), campaign| { + let channel_context = match self.config.find_chain_token(campaign.channel.token) { + Some(chain_of) => chain_of.with_channel(campaign.channel), + // Skip the current Channel as the Chain/Token is not configured + None => return (validators, channels), + }; + + // prepare to populate the chain of the Campaign validators + let chain_validators = validators + .entry(channel_context.chain.chain_id) + .or_default(); - let validators = campaigns - .into_iter() - .fold(Validators::new(), |mut acc, campaign| { for validator_desc in campaign.validators.iter() { // if Validator is already there, we can just skip it // remember, the campaigns are ordered by `created DESC` // so we will always get the latest Validator url first - match acc.entry(validator_desc.id) { + match chain_validators.entry(validator_desc.id) { Entry::Occupied(_) => continue, Entry::Vacant(entry) => { // try to parse the url of the Validator Desc @@ -295,7 +375,9 @@ impl SentryApi { // and also try to find the Auth token in the config // if there was an error with any of the operations, skip this `ValidatorDesc` - let 
auth_token = self.adapter.get_auth(validator_desc.id); + let auth_token = self + .adapter + .get_auth(channel_context.chain.chain_id, validator_desc.id); // only if `ApiUrl` parsing is `Ok` & Auth Token is found in the `Adapter` if let (Ok(url), Ok(auth_token)) = (validator_url, auth_token) { @@ -305,35 +387,81 @@ impl SentryApi { token: auth_token, }); } - // otherwise it will try to do the same things on the next encounter of this `ValidatorId` + // otherwise it will try to do the same things on the next encounter of this + // `ValidatorId` for the particular `Chain` } } } - acc - }); + // last but not least insert the channel! + channels.insert(channel_context); + + (validators, channels) + }, + ); Ok((channels, validators)) } } +pub fn assert_result(assert: bool, or_error: E) -> Result<(), E> { + if assert { + Ok(()) + } else { + Err(or_error) + } +} + impl SentryApi { pub async fn propagate( &self, - channel: Channel, + channel_context: &ChainOf, messages: &[&MessageTypes], - ) -> Vec { - join_all(self.propagate_to.iter().filter(|(validator_id, _)| { - channel.leader == **validator_id || channel.follower == **validator_id - }).map(|(validator_id, validator)| { - propagate_to::( - &self.client, - self.config.propagation_timeout, - channel.id(), - (*validator_id, validator), - messages, - ) - })).await + ) -> Result, Error> { + let chain_validators = self + .propagate_to + .get(&channel_context.chain.chain_id) + .ok_or(Error::ChainNotWhitelisted { + chain_id: channel_context.chain.chain_id, + })?; + + let channel_validators = [ + channel_context.context.leader, + channel_context.context.follower, + ]; + + let propagate_to_validators = channel_validators + .iter() + .filter_map(|channel_validator| { + chain_validators + .get(channel_validator) + .cloned() + .map(|validator| (*channel_validator, validator)) + }) + .collect::>(); + + // check if we found all the channel validators in the propagation list + if propagate_to_validators.len() != channel_validators.len() { + return Err(Error::PropagationValidatorsNotFound { + channel: channel_validators.to_vec(), + found: propagate_to_validators, + }); + } + + let propagation_results = join_all(propagate_to_validators.iter().map( + |(validator_id, validator)| { + propagate_to::( + &self.client, + self.config.propagation_timeout, + channel_context.context.id(), + (*validator_id, validator), + messages, + ) + }, + )) + .await; + + Ok(propagation_results) } } @@ -432,28 +560,27 @@ pub mod campaigns { use futures::future::try_join_all; use primitives::{ sentry::campaign::{CampaignListQuery, CampaignListResponse, ValidatorParam}, + util::ApiUrl, Campaign, ValidatorId, }; use reqwest::Client; - use super::Validator; - /// Fetches all `Campaign`s from `sentry` by going through all pages and collecting the `Campaign`s into a single `Vec` /// You can filter by `&validator=0x...` when passing `for_validator`. /// This will return campaigns that include the provided `for_validator` validator. 
pub async fn all_campaigns( client: Client, - whoami: &Validator, + sentry_url: &ApiUrl, for_validator: Option, ) -> Result, reqwest::Error> { - let first_page = fetch_page(&client, whoami, 0, for_validator).await?; + let first_page = fetch_page(&client, sentry_url, 0, for_validator).await?; if first_page.pagination.total_pages < 2 { Ok(first_page.campaigns) } else { let all = try_join_all( (1..first_page.pagination.total_pages) - .map(|i| fetch_page(&client, whoami, i, for_validator)), + .map(|i| fetch_page(&client, sentry_url, i, for_validator)), ) .await?; @@ -467,7 +594,7 @@ pub mod campaigns { async fn fetch_page( client: &Client, - whoami: &Validator, + sentry_url: &ApiUrl, page: u64, for_validator: Option, ) -> Result { @@ -478,8 +605,7 @@ pub mod campaigns { validator: for_validator.map(ValidatorParam::Validator), }; - let endpoint = whoami - .url + let endpoint = sentry_url .join(&format!( "v5/campaign/list?{}", serde_urlencoded::to_string(query).expect("Should not fail to serialize") @@ -610,11 +736,13 @@ mod test { .mount(&server) .await; + let sentry_url = ApiUrl::from_str(&server.uri()).expect("Should parse"); + let mut validators = Validators::new(); validators.insert( DUMMY_VALIDATOR_LEADER.id, Validator { - url: ApiUrl::from_str(&server.uri()).expect("Should parse"), + url: sentry_url.clone(), token: AuthToken::default(), }, ); @@ -623,15 +751,22 @@ mod test { let adapter = Adapter::with_unlocked(Dummy::init(Options { dummy_identity: IDS["leader"], - dummy_auth_tokens: Default::default(), + dummy_auth_tokens: vec![(IDS["leader"].to_address(), "AUTH_Leader".into())] + .into_iter() + .collect(), })); let logger = discard_logger(); + let channel_context = config + .find_chain_token(DUMMY_CAMPAIGN.channel.token) + .expect("Should find Dummy campaign token in config") + .with_channel(DUMMY_CAMPAIGN.channel); + let sentry = - SentryApi::init(adapter, logger, config, validators).expect("Should build sentry"); + SentryApi::new(adapter, logger, config, sentry_url).expect("Should build sentry"); let mut res = sentry - .get_all_spenders(DUMMY_CAMPAIGN.channel.id()) + .get_all_spenders(&channel_context) .await .expect("should get response"); diff --git a/validator_worker/src/worker.rs b/validator_worker/src/worker.rs index bd36900bc..a885fb011 100644 --- a/validator_worker/src/worker.rs +++ b/validator_worker/src/worker.rs @@ -58,19 +58,27 @@ impl Worker { pub async fn all_channels_tick(&self) { let logger = &self.logger; - let (channels, validators) = match self.sentry.collect_channels().await { + let (channels_context, validators) = match self.sentry.collect_channels().await { Ok(res) => res, Err(err) => { error!(logger, "Error collecting all channels for tick"; "collect_channels" => ?err, "main" => "all_channels_tick"); return; } }; - let channels_size = channels.len(); + let channels_size = channels_context.len(); - let sentry_with_propagate = self.sentry.clone().with_propagate(validators); + let sentry_with_propagate = match self.sentry.clone().with_propagate(validators) { + Ok(sentry) => sentry, + Err(err) => { + error!(logger, "Failed to set propagation validators: {err}"; "err" => ?err, "main" => "all_channels_tick"); + return; + } + }; + + let tick_results = join_all(channels_context.into_iter().map(|channel_context| { + let channel = channel_context.context; - let tick_results = join_all(channels.into_iter().map(|channel| { - channel_tick(&sentry_with_propagate, &self.config, channel) + channel_tick(&sentry_with_propagate, &self.config, channel_context) .map_err(move |err| 
(channel, err)) })) .await;
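// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch above): how the chain-aware API
// introduced by this change set is wired together by a caller, mirroring the
// Dummy-adapter test setup from sentry_interface.rs. The `config`, `logger`
// and `sentry_url` values are assumed to be provided by the caller, and the
// import paths are approximate — treat this as a minimal sketch under those
// assumptions, not a definitive integration.

use adapter::{dummy::Options, prelude::*, Adapter, Dummy};
use primitives::{
    config::Config,
    util::{tests::prep_db::{DUMMY_CAMPAIGN, IDS}, ApiUrl},
};
use slog::Logger;
use validator_worker::SentryApi;

async fn sketch_chain_aware_setup(
    config: Config,
    logger: Logger,
    sentry_url: ApiUrl,
) -> anyhow::Result<()> {
    // Unlocked Dummy adapter with a pre-configured auth token, as in the tests.
    let adapter = Adapter::with_unlocked(Dummy::init(Options {
        dummy_identity: IDS["leader"],
        dummy_auth_tokens: vec![(IDS["leader"].to_address(), "AUTH_Leader".into())]
            .into_iter()
            .collect(),
    }));

    // `SentryApi::new` now takes only the default Sentry URL; it generates a
    // per-chain auth token internally via `adapter.get_auth(chain_id, whoami)`
    // for every Chain whitelisted in the Config.
    let sentry = SentryApi::new(adapter, logger, config.clone(), sentry_url)?;

    // Channel-scoped calls take a `ChainOf<Channel>`, so the Chain & Token
    // context always travels together with the Channel.
    let channel_context = config
        .find_chain_token(DUMMY_CAMPAIGN.channel.token)
        .expect("Dummy channel token should be present in the Config")
        .with_channel(DUMMY_CAMPAIGN.channel);

    let _spenders = sentry.get_all_spenders(&channel_context).await?;

    Ok(())
}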