From 6a9ca1a22668f7cefd9b102d534d9e516b52d646 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 12 Mar 2021 09:39:37 +0200 Subject: [PATCH 01/49] primitives - remove unused `ValidatorError` --- primitives/src/validator.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index 1dc9badbc..0024665d9 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -6,14 +6,6 @@ use std::fmt; use crate::{targeting::Value, BalancesMap, BigNum, DomainError, ToETHChecksum}; use std::convert::TryFrom; -#[derive(Debug)] -pub enum ValidatorError { - None, - InvalidRootHash, - InvalidSignature, - InvalidTransition, -} - #[derive(Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[serde(transparent)] pub struct ValidatorId( From aaabc5add2e55cdbf860bdd07545351d46595493 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 12 Mar 2021 17:32:46 +0200 Subject: [PATCH 02/49] primitives - Channel v5 & Campaing + move structs --- primitives/src/campaign.rs | 101 +++++++++++++++++++++++++++++++++++ primitives/src/channel_v5.rs | 15 ++++++ primitives/src/lib.rs | 2 + 3 files changed, 118 insertions(+) create mode 100644 primitives/src/campaign.rs create mode 100644 primitives/src/channel_v5.rs diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs new file mode 100644 index 000000000..0d671ca94 --- /dev/null +++ b/primitives/src/campaign.rs @@ -0,0 +1,101 @@ +use crate::{channel_v5::Channel, targeting::Rules, AdUnit, BigNum, EventSubmission, SpecValidators}; + +use chrono::{ + serde::{ts_milliseconds, ts_milliseconds_option}, + DateTime, Utc, +}; +use serde::{Deserialize, Serialize}; + +pub use pricing::{Pricing, PricingBounds}; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Campaign { + channel: Channel, + spec: CampaignSpec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CampaignSpec { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub title: Option, + pub validators: SpecValidators, + /// Event pricing bounds + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pricing_bounds: Option, + /// EventSubmission object, applies to event submission (POST /channel/:id/events) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub event_submission: Option, + /// A millisecond timestamp of when the campaign was created + #[serde(with = "ts_milliseconds")] + pub created: DateTime, + /// A millisecond timestamp representing the time you want this campaign to become active (optional) + /// Used by the AdViewManager & Targeting AIP#31 + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "ts_milliseconds_option" + )] + pub active_from: Option>, + /// A random number to ensure the campaignSpec hash is unique + #[serde(default, skip_serializing_if = "Option::is_none")] + pub nonce: Option, + /// A millisecond timestamp of when the campaign should enter a withdraw period + /// (no longer accept any events other than CHANNEL_CLOSE) + /// A sane value should be lower than channel.validUntil * 1000 and higher than created + /// It's recommended to set this at least one month prior to channel.validUntil * 1000 + #[serde(with = "ts_milliseconds")] + pub withdraw_period_start: DateTime, + /// An array of AdUnit (optional) + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub ad_units: Vec, + #[serde(default)] + pub targeting_rules: Rules, +} + +mod pricing { + use crate::BigNum; + use serde::{Deserialize, Serialize}; + 
+ #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] + pub struct Pricing { + pub max: BigNum, + pub min: BigNum, + } + + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] + #[serde(rename_all = "UPPERCASE")] + pub struct PricingBounds { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub impression: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub click: Option, + } + + impl PricingBounds { + pub fn to_vec(&self) -> Vec<(&str, Pricing)> { + let mut vec = Vec::new(); + + if let Some(pricing) = self.impression.as_ref() { + vec.push(("IMPRESSION", pricing.clone())); + } + + if let Some(pricing) = self.click.as_ref() { + vec.push(("CLICK", pricing.clone())) + } + + vec + } + + pub fn get(&self, event_type: &str) -> Option<&Pricing> { + match event_type { + "IMPRESSION" => self.impression.as_ref(), + "CLICK" => self.click.as_ref(), + _ => None, + } + } + } +} +// TODO: Move SpecValidators (spec::Validators?) + +// TODO: Postgres Campaign +// TODO: Postgres CampaignSpec diff --git a/primitives/src/channel_v5.rs b/primitives/src/channel_v5.rs new file mode 100644 index 000000000..5bb40af91 --- /dev/null +++ b/primitives/src/channel_v5.rs @@ -0,0 +1,15 @@ +use serde::{Serialize, Deserialize}; + +use crate::{ValidatorId as Address, BigNum, ChannelId, ValidatorId}; +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Channel { + pub id: ChannelId, + pub leader: ValidatorId, + pub follower: ValidatorId, + pub guardian: Address, + pub token: Address, + pub nonce: BigNum, +} + +// TODO: Postgres Channel diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index fe1a6dc7a..2c923cfee 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -8,6 +8,8 @@ mod ad_unit; pub mod adapter; pub mod balances_map; pub mod big_num; +pub mod campaign; +pub mod channel_v5; pub mod channel; pub mod channel_validator; pub mod config; From a4b2ebb1d3a3ff95721bac502ab172f1a29980a3 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 12 Mar 2021 18:19:37 +0200 Subject: [PATCH 03/49] primitives - ToHex & address::Address --- primitives/src/address.rs | 125 ++++++++++++++++++++ primitives/src/lib.rs | 44 +++++-- primitives/src/validator.rs | 223 ++++++++++++++---------------------- 3 files changed, 245 insertions(+), 147 deletions(-) create mode 100644 primitives/src/address.rs diff --git a/primitives/src/address.rs b/primitives/src/address.rs new file mode 100644 index 000000000..874e1c1d6 --- /dev/null +++ b/primitives/src/address.rs @@ -0,0 +1,125 @@ +use serde::{Deserialize, Serialize, Serializer}; +use std::fmt; + +use crate::{ToHex, targeting::Value, DomainError, ToETHChecksum}; +use std::convert::TryFrom; + +#[derive(Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(transparent)] +pub struct Address( + #[serde( + deserialize_with = "ser::from_str", + serialize_with = "SerHex::::serialize" + )] + [u8; 20], +); + +impl Address { + pub fn as_bytes(&self) -> &[u8; 20] { + &self.0 + } +} + +impl Serialize for Address { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let checksum = self.to_checksum(); + serializer.serialize_str(&checksum) + } +} + +impl fmt::Debug for Address { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Address({})", self.to_hex_prefixed()) + } +} + +impl ToETHChecksum for Address {} + +impl From<&[u8; 20]> for Address { + fn from(bytes: &[u8; 20]) -> Self { + Self(*bytes) + } +} + 
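A minimal usage sketch for the new `Address` type (illustrative only, not part of the patch; it assumes `Address` and the pre-existing `ToETHChecksum` trait are reachable from the `primitives` crate root, as the `lib.rs` hunk later in this patch re-exports them):

    use primitives::{Address, ToETHChecksum};

    fn address_sketch() {
        let bytes = [0x11_u8; 20];
        let address = Address::from(&bytes);

        // `Debug` prints the lowercase, `0x`-prefixed hex form via `to_hex_prefixed`.
        assert_eq!(
            format!("{:?}", address),
            format!("Address(0x{})", "11".repeat(20))
        );

        // `Serialize` (and the `Display` impl just below) use the EIP-55
        // checksummed string produced by `ToETHChecksum::to_checksum`.
        assert!(address.to_checksum().starts_with("0x"));
    }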
+impl AsRef<[u8]> for Address { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl TryFrom<&str> for Address { + type Error = DomainError; + fn try_from(value: &str) -> Result { + let hex_value = match value { + value if value.len() == 42 => Ok(&value[2..]), + value if value.len() == 40 => Ok(value), + _ => Err(DomainError::InvalidArgument( + "invalid validator id length".to_string(), + )), + }?; + + let result = hex::decode(hex_value).map_err(|_| { + DomainError::InvalidArgument("Failed to deserialize validator id".to_string()) + })?; + + if result.len() != 20 { + return Err(DomainError::InvalidArgument(format!( + "Invalid validator id value {}", + value + ))); + } + + let mut id: [u8; 20] = [0; 20]; + id.copy_from_slice(&result[..]); + Ok(Self(id)) + } +} + +impl TryFrom<&String> for Address { + type Error = DomainError; + + fn try_from(value: &String) -> Result { + Address::try_from(value.as_str()) + } +} + +impl fmt::Display for Address { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.to_checksum()) + } +} + +impl TryFrom for Address { + type Error = DomainError; + + fn try_from(value: Value) -> Result { + let string = value.try_string().map_err(|err| { + DomainError::InvalidArgument(format!("Value is not a string: {}", err)) + })?; + + Self::try_from(&string) + } +} + + +mod ser { + use hex::FromHex; + use serde::{Deserialize, Deserializer}; + + pub(super) fn from_str<'de, D>(deserializer: D) -> Result<[u8; 20], D::Error> + where + D: Deserializer<'de>, + { + let validator_id = String::deserialize(deserializer)?; + if validator_id.is_empty() || validator_id.len() != 42 { + return Err(serde::de::Error::custom( + "invalid validator id length".to_string(), + )); + } + + <[u8; 20] as FromHex>::from_hex(&validator_id[2..]).map_err(serde::de::Error::custom) + } +} diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 2c923cfee..76b6e77aa 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -1,10 +1,22 @@ #![deny(rust_2018_idioms)] #![deny(clippy::all)] -use std::error; -use std::fmt; +use std::{error, fmt}; +pub use self::{ + address::Address, + ad_slot::AdSlot, + ad_unit::AdUnit, + balances_map::BalancesMap, + big_num::BigNum, + channel::{Channel, ChannelId, ChannelSpec, SpecValidator, SpecValidators}, + config::Config, + event_submission::EventSubmission, + ipfs::IPFS, + validator::{ValidatorDesc, ValidatorId}, +}; mod ad_slot; mod ad_unit; +pub mod address; pub mod adapter; pub mod balances_map; pub mod big_num; @@ -44,16 +56,6 @@ pub mod analytics; mod eth_checksum; pub mod validator; -pub use self::ad_slot::AdSlot; -pub use self::ad_unit::AdUnit; -pub use self::balances_map::BalancesMap; -pub use self::big_num::BigNum; -pub use self::channel::{Channel, ChannelId, ChannelSpec, SpecValidator, SpecValidators}; -pub use self::config::Config; -pub use self::event_submission::EventSubmission; -pub use self::ipfs::IPFS; -pub use self::validator::{ValidatorDesc, ValidatorId}; - #[derive(Debug, PartialEq, Eq)] pub enum DomainError { InvalidArgument(String), @@ -84,3 +86,21 @@ pub trait ToETHChecksum: AsRef<[u8]> { } impl ToETHChecksum for &[u8; 20] {} + +pub trait ToHex { + // Hex encoded `String`, **without** __Checksum__ming the string + fn to_hex(&self) -> String; + + // Hex encoded `0x` prefixed `String`, **without** __Checksum__ming the string + fn to_hex_prefixed(&self) -> String; +} + +impl> ToHex for T { + fn to_hex(&self) -> String { + hex::encode(self.as_ref()) + } + + fn to_hex_prefixed(&self) -> String { + 
format!("0x{}", self.as_ref().to_hex()) + } +} diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index 0024665d9..791415725 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -1,106 +1,58 @@ -use chrono::{DateTime, Utc}; -use hex::FromHex; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use std::fmt; +use serde::{Deserialize, Serialize}; +use std::{convert::TryFrom, fmt}; -use crate::{targeting::Value, BalancesMap, BigNum, DomainError, ToETHChecksum}; -use std::convert::TryFrom; +use crate::{targeting::Value, Address, BigNum, DomainError, ToETHChecksum, ToHex}; -#[derive(Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub use messages::*; + +#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[serde(transparent)] -pub struct ValidatorId( - #[serde( - deserialize_with = "validator_id_from_str", - serialize_with = "SerHex::::serialize" - )] - [u8; 20], -); +pub struct ValidatorId(Address); impl fmt::Debug for ValidatorId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "ValidatorId({})", self.to_hex_prefix_string()) - } -} - -fn validator_id_from_str<'de, D>(deserializer: D) -> Result<[u8; 20], D::Error> -where - D: Deserializer<'de>, -{ - let validator_id = String::deserialize(deserializer)?; - if validator_id.is_empty() || validator_id.len() != 42 { - return Err(serde::de::Error::custom( - "invalid validator id length".to_string(), - )); + write!(f, "ValidatorId({})", self.to_hex_prefixed()) } - - <[u8; 20] as FromHex>::from_hex(&validator_id[2..]).map_err(serde::de::Error::custom) } impl ValidatorId { pub fn inner(&self) -> &[u8; 20] { - &self.0 + &self.0.as_bytes() } /// To Hex non-`0x` prefixed string without **Checksum**ing the string + /// For backwards compatibility + /// TODO: Remove once we change all places this method is used at pub fn to_hex_non_prefix_string(&self) -> String { - hex::encode(self.0) + self.0.to_hex() } /// To Hex `0x` prefixed string **without** __Checksum__ing the string + /// For backwards compatibility + /// TODO: Remove once we change all places this method is used at pub fn to_hex_prefix_string(&self) -> String { - format!("0x{}", self.to_hex_non_prefix_string()) + self.0.to_hex_prefixed() } } impl ToETHChecksum for ValidatorId {} -impl Serialize for ValidatorId { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let checksum = self.to_checksum(); - serializer.serialize_str(&checksum) - } -} - impl From<&[u8; 20]> for ValidatorId { fn from(bytes: &[u8; 20]) -> Self { - Self(*bytes) + Self(Address::from(bytes)) } } impl AsRef<[u8]> for ValidatorId { fn as_ref(&self) -> &[u8] { - &self.0 + &self.0.as_ref() } } impl TryFrom<&str> for ValidatorId { type Error = DomainError; fn try_from(value: &str) -> Result { - let hex_value = match value { - value if value.len() == 42 => Ok(&value[2..]), - value if value.len() == 40 => Ok(value), - _ => Err(DomainError::InvalidArgument( - "invalid validator id length".to_string(), - )), - }?; - - let result = hex::decode(hex_value).map_err(|_| { - DomainError::InvalidArgument("Failed to deserialize validator id".to_string()) - })?; - - if result.len() != 20 { - return Err(DomainError::InvalidArgument(format!( - "Invalid validator id value {}", - value - ))); - } - - let mut id: [u8; 20] = [0; 20]; - id.copy_from_slice(&result[..]); - Ok(Self(id)) + Address::try_from(value).map(Self) } } @@ -108,7 +60,7 @@ impl TryFrom<&String> for ValidatorId { type Error = 
DomainError; fn try_from(value: &String) -> Result { - ValidatorId::try_from(value.as_str()) + Address::try_from(value).map(Self) } } @@ -122,11 +74,7 @@ impl TryFrom for ValidatorId { type Error = DomainError; fn try_from(value: Value) -> Result { - let string = value.try_string().map_err(|err| { - DomainError::InvalidArgument(format!("Value is not a string: {}", err)) - })?; - - Self::try_from(&string) + Address::try_from(value).map(Self) } } @@ -142,73 +90,78 @@ pub struct ValidatorDesc { // Validator Message Types -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct Accounting { - #[serde(rename = "lastEvAggr")] - pub last_event_aggregate: DateTime, - pub balances_before_fees: BalancesMap, - pub balances: BalancesMap, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ApproveState { - pub state_root: String, - pub signature: String, - pub is_healthy: bool, - #[serde(default)] - pub exhausted: bool, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct NewState { - pub state_root: String, - pub signature: String, - pub balances: BalancesMap, - #[serde(default)] - pub exhausted: bool, -} - -#[derive(Default, Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct RejectState { - pub reason: String, - pub state_root: String, - pub signature: String, - pub balances: Option, - pub timestamp: Option>, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct Heartbeat { - pub signature: String, - pub state_root: String, - pub timestamp: DateTime, -} - -impl Heartbeat { - pub fn new(signature: String, state_root: String) -> Self { - Self { - signature, - state_root, - timestamp: Utc::now(), +mod messages { + use chrono::{DateTime, Utc}; + use serde::{Serialize, Deserialize}; + use crate::BalancesMap; + + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] + #[serde(rename_all = "camelCase")] + pub struct Accounting { + #[serde(rename = "lastEvAggr")] + pub last_event_aggregate: DateTime, + pub balances_before_fees: BalancesMap, + pub balances: BalancesMap, + } + + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] + #[serde(rename_all = "camelCase")] + pub struct ApproveState { + pub state_root: String, + pub signature: String, + pub is_healthy: bool, + #[serde(default)] + pub exhausted: bool, + } + + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] + #[serde(rename_all = "camelCase")] + pub struct NewState { + pub state_root: String, + pub signature: String, + pub balances: BalancesMap, + #[serde(default)] + pub exhausted: bool, + } + + #[derive(Default, Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] + #[serde(rename_all = "camelCase")] + pub struct RejectState { + pub reason: String, + pub state_root: String, + pub signature: String, + pub balances: Option, + pub timestamp: Option>, + } + + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] + #[serde(rename_all = "camelCase")] + pub struct Heartbeat { + pub signature: String, + pub state_root: String, + pub timestamp: DateTime, + } + + impl Heartbeat { + pub fn new(signature: String, state_root: String) -> Self { + Self { + signature, + state_root, + timestamp: Utc::now(), + } } } -} -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(tag = "type")] -pub enum MessageTypes { - 
ApproveState(ApproveState), - NewState(NewState), - RejectState(RejectState), - Heartbeat(Heartbeat), - Accounting(Accounting), + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] + #[serde(tag = "type")] + pub enum MessageTypes { + ApproveState(ApproveState), + NewState(NewState), + RejectState(RejectState), + Heartbeat(Heartbeat), + Accounting(Accounting), + } } - #[cfg(feature = "postgres")] pub mod postgres { use super::ValidatorId; From e5ed95967a96b2e7a79cf4126c9edb10b932fc11 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 15 Mar 2021 11:34:41 +0200 Subject: [PATCH 04/49] primitives - Address - improvements --- primitives/src/address.rs | 106 ++++++++++++++++++++---------------- primitives/src/validator.rs | 10 ++-- 2 files changed, 65 insertions(+), 51 deletions(-) diff --git a/primitives/src/address.rs b/primitives/src/address.rs index 874e1c1d6..dba0c1fe3 100644 --- a/primitives/src/address.rs +++ b/primitives/src/address.rs @@ -1,14 +1,25 @@ +use hex::{FromHex, FromHexError}; use serde::{Deserialize, Serialize, Serializer}; -use std::fmt; - -use crate::{ToHex, targeting::Value, DomainError, ToETHChecksum}; -use std::convert::TryFrom; +use std::{convert::TryFrom, fmt}; +use thiserror::Error; + +use crate::{targeting::Value, DomainError, ToETHChecksum, ToHex}; + +#[derive(Debug, Error)] +pub enum Error { + #[error("Expected prefix `0x`")] + BadPrefix, + #[error("Expected length of 40 without or 42 with a `0x` prefix")] + Length, + #[error("Invalid hex")] + Hex(#[from] FromHexError), +} #[derive(Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[serde(transparent)] pub struct Address( #[serde( - deserialize_with = "ser::from_str", + deserialize_with = "de::from_bytes_insensitive", serialize_with = "SerHex::::serialize" )] [u8; 20], @@ -30,6 +41,12 @@ impl Serialize for Address { } } +impl fmt::Display for Address { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.to_checksum()) + } +} + impl fmt::Debug for Address { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Address({})", self.to_hex_prefixed()) @@ -51,44 +68,18 @@ impl AsRef<[u8]> for Address { } impl TryFrom<&str> for Address { - type Error = DomainError; - fn try_from(value: &str) -> Result { - let hex_value = match value { - value if value.len() == 42 => Ok(&value[2..]), - value if value.len() == 40 => Ok(value), - _ => Err(DomainError::InvalidArgument( - "invalid validator id length".to_string(), - )), - }?; - - let result = hex::decode(hex_value).map_err(|_| { - DomainError::InvalidArgument("Failed to deserialize validator id".to_string()) - })?; + type Error = Error; - if result.len() != 20 { - return Err(DomainError::InvalidArgument(format!( - "Invalid validator id value {}", - value - ))); - } - - let mut id: [u8; 20] = [0; 20]; - id.copy_from_slice(&result[..]); - Ok(Self(id)) + fn try_from(value: &str) -> Result { + Ok(Self(from_bytes(value, Prefix::Insensitive)?)) } } impl TryFrom<&String> for Address { - type Error = DomainError; + type Error = Error; fn try_from(value: &String) -> Result { - Address::try_from(value.as_str()) - } -} - -impl fmt::Display for Address { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.to_checksum()) + Self::try_from(value.as_str()) } } @@ -100,26 +91,47 @@ impl TryFrom for Address { DomainError::InvalidArgument(format!("Value is not a string: {}", err)) })?; - Self::try_from(&string) + Self::try_from(&string).map_err(|err| 
DomainError::InvalidArgument(err.to_string())) } } - -mod ser { - use hex::FromHex; +mod de { + use super::{from_bytes, Prefix}; use serde::{Deserialize, Deserializer}; - pub(super) fn from_str<'de, D>(deserializer: D) -> Result<[u8; 20], D::Error> + /// Deserializes the bytes with our without a `0x` prefix (insensitive) + pub(super) fn from_bytes_insensitive<'de, D>(deserializer: D) -> Result<[u8; 20], D::Error> where D: Deserializer<'de>, { let validator_id = String::deserialize(deserializer)?; - if validator_id.is_empty() || validator_id.len() != 42 { - return Err(serde::de::Error::custom( - "invalid validator id length".to_string(), - )); - } - <[u8; 20] as FromHex>::from_hex(&validator_id[2..]).map_err(serde::de::Error::custom) + from_bytes(validator_id, Prefix::Insensitive).map_err(serde::de::Error::custom) + } +} + +pub enum Prefix { + // with `0x` prefix + With, + // without `0x` prefix + Without, + /// Insensitive to a `0x` prefixed, it allows values with or without a prefix + Insensitive, +} + +pub fn from_bytes>(from: T, prefix: Prefix) -> Result<[u8; 20], Error> { + let bytes = from.as_ref(); + + let from_hex = + |hex_bytes: &[u8]| <[u8; 20] as FromHex>::from_hex(hex_bytes).map_err(Error::Hex); + + // this length check guards against `panic!` when we call `slice.split_at()` + match (prefix, bytes.len()) { + (Prefix::With, 42) | (Prefix::Insensitive, 42) => match bytes.split_at(2) { + (b"0x", hex_bytes) => from_hex(hex_bytes), + _ => Err(Error::BadPrefix), + }, + (Prefix::Without, 40) | (Prefix::Insensitive, 40) => from_hex(bytes), + _ => Err(Error::Length), } } diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index 791415725..75fdafa47 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -52,7 +52,9 @@ impl AsRef<[u8]> for ValidatorId { impl TryFrom<&str> for ValidatorId { type Error = DomainError; fn try_from(value: &str) -> Result { - Address::try_from(value).map(Self) + Address::try_from(value) + .map_err(|err| DomainError::InvalidArgument(err.to_string())) + .map(Self) } } @@ -60,7 +62,7 @@ impl TryFrom<&String> for ValidatorId { type Error = DomainError; fn try_from(value: &String) -> Result { - Address::try_from(value).map(Self) + Self::try_from(value.as_str()) } } @@ -91,9 +93,9 @@ pub struct ValidatorDesc { // Validator Message Types mod messages { - use chrono::{DateTime, Utc}; - use serde::{Serialize, Deserialize}; use crate::BalancesMap; + use chrono::{DateTime, Utc}; + use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] From fc5f7e9b6fd25cd96fcd74290e48ce830306eaf2 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 15 Mar 2021 11:44:12 +0200 Subject: [PATCH 05/49] primitives - channel v5 - use Address --- primitives/src/channel_v5.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/primitives/src/channel_v5.rs b/primitives/src/channel_v5.rs index 5bb40af91..dbdc234a9 100644 --- a/primitives/src/channel_v5.rs +++ b/primitives/src/channel_v5.rs @@ -1,6 +1,6 @@ -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; -use crate::{ValidatorId as Address, BigNum, ChannelId, ValidatorId}; +use crate::{BigNum, ChannelId, ValidatorId, Address}; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Channel { From 4a803abd97b41f0cc6c63e9499609ae85bd5cb59 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 15 Mar 2021 13:15:50 +0200 Subject: 
[PATCH 06/49] primitives - campaign - spec Validators and matching --- primitives/src/campaign.rs | 148 +++++++++++++++++++++++++++++++++++-- 1 file changed, 143 insertions(+), 5 deletions(-) diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 0d671ca94..7ce2b0335 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -1,4 +1,6 @@ -use crate::{channel_v5::Channel, targeting::Rules, AdUnit, BigNum, EventSubmission, SpecValidators}; +use crate::{ + channel_v5::Channel, targeting::Rules, AdUnit, BigNum, EventSubmission, ValidatorDesc, +}; use chrono::{ serde::{ts_milliseconds, ts_milliseconds_option}, @@ -7,18 +9,41 @@ use chrono::{ use serde::{Deserialize, Serialize}; pub use pricing::{Pricing, PricingBounds}; +pub use spec::{ValidatorRole, Validators}; #[derive(Debug, Serialize, Deserialize)] pub struct Campaign { - channel: Channel, - spec: CampaignSpec, + pub channel: Channel, + pub spec: CampaignSpec, +} + +impl Campaign { + /// Matches the Channel.leader to the Campaign.spec.leader + /// If they match it returns `Some`, otherwise, it returns `None` + pub fn leader<'a>(&'a self) -> Option<&'a ValidatorDesc> { + if self.channel.leader == self.spec.validators.leader().id { + Some(self.spec.validators.leader()) + } else { + None + } + } + + /// Matches the Channel.follower to the Campaign.spec.follower + /// If they match it returns `Some`, otherwise, it returns `None` + pub fn follower<'a>(&'a self) -> Option<&'a ValidatorDesc> { + if self.channel.follower == self.spec.validators.follower().id { + Some(self.spec.validators.follower()) + } else { + None + } + } } #[derive(Debug, Serialize, Deserialize)] pub struct CampaignSpec { #[serde(default, skip_serializing_if = "Option::is_none")] pub title: Option, - pub validators: SpecValidators, + pub validators: Validators, /// Event pricing bounds #[serde(default, skip_serializing_if = "Option::is_none")] pub pricing_bounds: Option, @@ -95,7 +120,120 @@ mod pricing { } } } -// TODO: Move SpecValidators (spec::Validators?) 
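The `leader`/`follower` helpers added above only return the spec's `ValidatorDesc` when the channel and the campaign spec agree on who fills that role. A hypothetical guard built on top of them (illustrative only; constructing a full `Campaign` is omitted):

    use primitives::campaign::Campaign;

    /// `true` only when both validators recorded on the channel match the ones
    /// described in the campaign spec.
    fn validators_match(campaign: &Campaign) -> bool {
        campaign.leader().is_some() && campaign.follower().is_some()
    }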
+// TODO: Double check if we require all the methods and enums, as some parts are now in the `Campaign` +// This includes the matching of the Channel leader & follower to the Spec Validators +pub mod spec { + use crate::{ValidatorDesc, ValidatorId}; + use serde::{Deserialize, Serialize}; + + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] + /// A (leader, follower) tuple + pub struct Validators(ValidatorDesc, ValidatorDesc); + + #[derive(Debug)] + pub enum ValidatorRole<'a> { + Leader(&'a ValidatorDesc), + Follower(&'a ValidatorDesc), + } + + impl<'a> ValidatorRole<'a> { + pub fn validator(&self) -> &'a ValidatorDesc { + match self { + ValidatorRole::Leader(validator) => validator, + ValidatorRole::Follower(validator) => validator, + } + } + } + + impl Validators { + pub fn new(leader: ValidatorDesc, follower: ValidatorDesc) -> Self { + Self(leader, follower) + } + + pub fn leader(&self) -> &ValidatorDesc { + &self.0 + } + + pub fn follower(&self) -> &ValidatorDesc { + &self.1 + } + + pub fn find(&self, validator_id: &ValidatorId) -> Option> { + if &self.leader().id == validator_id { + Some(ValidatorRole::Leader(&self.leader())) + } else if &self.follower().id == validator_id { + Some(ValidatorRole::Follower(&self.follower())) + } else { + None + } + } + + pub fn find_index(&self, validator_id: &ValidatorId) -> Option { + if &self.leader().id == validator_id { + Some(0) + } else if &self.follower().id == validator_id { + Some(1) + } else { + None + } + } + + pub fn iter(&self) -> Iter<'_> { + Iter::new(&self) + } + } + + impl From<(ValidatorDesc, ValidatorDesc)> for Validators { + fn from((leader, follower): (ValidatorDesc, ValidatorDesc)) -> Self { + Self(leader, follower) + } + } + + /// Fixed size iterator of 2, as we need an iterator in couple of occasions + impl<'a> IntoIterator for &'a Validators { + type Item = &'a ValidatorDesc; + type IntoIter = Iter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } + } + + pub struct Iter<'a> { + validators: &'a Validators, + index: u8, + } + + impl<'a> Iter<'a> { + fn new(validators: &'a Validators) -> Self { + Self { + validators, + index: 0, + } + } + } + + impl<'a> Iterator for Iter<'a> { + type Item = &'a ValidatorDesc; + + fn next(&mut self) -> Option { + match self.index { + 0 => { + self.index += 1; + + Some(self.validators.leader()) + } + 1 => { + self.index += 1; + + Some(self.validators.follower()) + } + _ => None, + } + } + } +} // TODO: Postgres Campaign // TODO: Postgres CampaignSpec +// TODO: Postgres Validators From fa4f084ec52fb30b66b56cb7f8c20830973caedf Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 15 Mar 2021 13:26:22 +0200 Subject: [PATCH 07/49] rustfmt --- primitives/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 76b6e77aa..4e202f2ca 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -1,10 +1,9 @@ #![deny(rust_2018_idioms)] #![deny(clippy::all)] -use std::{error, fmt}; pub use self::{ - address::Address, ad_slot::AdSlot, ad_unit::AdUnit, + address::Address, balances_map::BalancesMap, big_num::BigNum, channel::{Channel, ChannelId, ChannelSpec, SpecValidator, SpecValidators}, @@ -13,16 +12,17 @@ pub use self::{ ipfs::IPFS, validator::{ValidatorDesc, ValidatorId}, }; +use std::{error, fmt}; mod ad_slot; mod ad_unit; -pub mod address; pub mod adapter; +pub mod address; pub mod balances_map; pub mod big_num; pub mod campaign; -pub mod channel_v5; pub mod channel; +pub mod channel_v5; 
pub mod channel_validator; pub mod config; pub mod event_submission; From defa9ab485ef54edfd63b2b841d2ad8dee763974 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 16 Mar 2021 11:38:01 +0200 Subject: [PATCH 08/49] primitives - campaign - merge spec and change fields --- primitives/Cargo.toml | 2 +- primitives/src/campaign.rs | 86 +++++++++++++++++++------------------- 2 files changed, 45 insertions(+), 43 deletions(-) diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 5c4c4758c..34e57549d 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -13,7 +13,7 @@ serde = { version = "^1.0", features = ['derive'] } serde_json = "1.0" serde-hex = "0.1.0" serde_millis = "0.1.1" -# Used prefixes on field for targeting::Input +# Used prefixes on field for targeting::Input, and `campaign::Active` serde_with = "1.6" # Configuration toml = "0.5" diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 7ce2b0335..30acb3baa 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -1,5 +1,5 @@ use crate::{ - channel_v5::Channel, targeting::Rules, AdUnit, BigNum, EventSubmission, ValidatorDesc, + channel_v5::Channel, targeting::Rules, AdUnit, Address, EventSubmission, ValidatorDesc, }; use chrono::{ @@ -7,74 +7,76 @@ use chrono::{ DateTime, Utc, }; use serde::{Deserialize, Serialize}; +use serde_with::with_prefix; pub use pricing::{Pricing, PricingBounds}; -pub use spec::{ValidatorRole, Validators}; +pub use validators::{ValidatorRole, Validators}; + +with_prefix!(prefix_active "active_"); #[derive(Debug, Serialize, Deserialize)] pub struct Campaign { pub channel: Channel, - pub spec: CampaignSpec, -} - -impl Campaign { - /// Matches the Channel.leader to the Campaign.spec.leader - /// If they match it returns `Some`, otherwise, it returns `None` - pub fn leader<'a>(&'a self) -> Option<&'a ValidatorDesc> { - if self.channel.leader == self.spec.validators.leader().id { - Some(self.spec.validators.leader()) - } else { - None - } - } - - /// Matches the Channel.follower to the Campaign.spec.follower - /// If they match it returns `Some`, otherwise, it returns `None` - pub fn follower<'a>(&'a self) -> Option<&'a ValidatorDesc> { - if self.channel.follower == self.spec.validators.follower().id { - Some(self.spec.validators.follower()) - } else { - None - } - } -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CampaignSpec { + pub creator: Address, + pub validators: Validators, #[serde(default, skip_serializing_if = "Option::is_none")] pub title: Option, - pub validators: Validators, /// Event pricing bounds #[serde(default, skip_serializing_if = "Option::is_none")] pub pricing_bounds: Option, /// EventSubmission object, applies to event submission (POST /channel/:id/events) #[serde(default, skip_serializing_if = "Option::is_none")] pub event_submission: Option, + /// An array of AdUnit (optional) + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub ad_units: Vec, + #[serde(default)] + pub targeting_rules: Rules, /// A millisecond timestamp of when the campaign was created #[serde(with = "ts_milliseconds")] pub created: DateTime, /// A millisecond timestamp representing the time you want this campaign to become active (optional) /// Used by the AdViewManager & Targeting AIP#31 + #[serde(flatten, with = "prefix_active")] + pub active: Active, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Active { #[serde( default, skip_serializing_if = "Option::is_none", with = "ts_milliseconds_option" )] - pub active_from: 
Option>, - /// A random number to ensure the campaignSpec hash is unique - #[serde(default, skip_serializing_if = "Option::is_none")] - pub nonce: Option, + pub from: Option>, /// A millisecond timestamp of when the campaign should enter a withdraw period /// (no longer accept any events other than CHANNEL_CLOSE) /// A sane value should be lower than channel.validUntil * 1000 and higher than created /// It's recommended to set this at least one month prior to channel.validUntil * 1000 #[serde(with = "ts_milliseconds")] - pub withdraw_period_start: DateTime, - /// An array of AdUnit (optional) - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub ad_units: Vec, - #[serde(default)] - pub targeting_rules: Rules, + pub active_to: DateTime, +} + +impl Campaign { + /// Matches the Channel.leader to the Campaign.spec.leader + /// If they match it returns `Some`, otherwise, it returns `None` + pub fn leader<'a>(&'a self) -> Option<&'a ValidatorDesc> { + if self.channel.leader == self.validators.leader().id { + Some(self.validators.leader()) + } else { + None + } + } + + /// Matches the Channel.follower to the Campaign.spec.follower + /// If they match it returns `Some`, otherwise, it returns `None` + pub fn follower<'a>(&'a self) -> Option<&'a ValidatorDesc> { + if self.channel.follower == self.validators.follower().id { + Some(self.validators.follower()) + } else { + None + } + } } mod pricing { @@ -121,8 +123,8 @@ mod pricing { } } // TODO: Double check if we require all the methods and enums, as some parts are now in the `Campaign` -// This includes the matching of the Channel leader & follower to the Spec Validators -pub mod spec { +// This includes the matching of the Channel leader & follower to the Validators +pub mod validators { use crate::{ValidatorDesc, ValidatorId}; use serde::{Deserialize, Serialize}; From 9ae51cc0e31f65ab5761cdd526ef36ce98f32033 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 16 Mar 2021 15:05:48 +0200 Subject: [PATCH 09/49] primitives - Cargo - update deps and remove duplicates --- Cargo.lock | 22 +++++++++++----------- primitives/Cargo.toml | 8 +++----- primitives/src/campaign.rs | 4 ++-- primitives/src/eth_checksum.rs | 32 ++++++++++++++++++++++++++++---- primitives/src/merkle_tree.rs | 14 +++++++------- validator_worker/Cargo.toml | 2 +- 6 files changed, 52 insertions(+), 30 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 28b7bdb5d..a669e5708 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2232,9 +2232,9 @@ dependencies = [ [[package]] name = "num" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b7a8e9be5e039e2ff869df49155f1c06bd01ade2117ec783e56ab0932b67a8f" +checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" dependencies = [ "num-bigint", "num-complex", @@ -2246,9 +2246,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e9a41747ae4633fce5adffb4d2e81ffc5e89593cb19917f8fb2cc5ff76507bf" +checksum = "4e0d047c1062aa51e256408c560894e5251f08925980e53cf1aa5bd00eec6512" dependencies = [ "autocfg 1.0.1", "num-integer", @@ -2258,11 +2258,12 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" +checksum = 
"26873667bbbb7c5182d4a37c1add32cdf09f841af72da53318fdb81543c15085" dependencies = [ "num-traits", + "serde", ] [[package]] @@ -2299,14 +2300,15 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.3.2" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a" dependencies = [ "autocfg 1.0.1", "num-bigint", "num-integer", "num-traits", + "serde", ] [[package]] @@ -2766,14 +2768,12 @@ dependencies = [ "lazy_static", "merkletree", "num", - "num-bigint", "num-derive", "num-traits", "parse-display", "postgres-types 0.2.0", "pretty_assertions", "rand 0.8.2", - "rust-crypto", "serde", "serde-hex", "serde_json", @@ -2784,7 +2784,7 @@ dependencies = [ "slog-term", "thiserror", "time 0.1.43", - "tiny-keccak 1.5.0", + "tiny-keccak 2.0.2", "tokio-postgres 0.7.0", "toml", "url", diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 34e57549d..136de67bd 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -31,12 +31,10 @@ parse-display = "^0.4.1" cid = "0.6" hex = "0.4" merkletree = "0.10.0" -tiny-keccak = "1.5" -rust-crypto = "0.2" -url = { version = "=2.2", features = ["serde"]} +tiny-keccak = { version = "^2.0", features = ["keccak"] } +url = { version = "=2.2", features = ["serde"] } # Numbers - BigNum, Numbers, Traits and Derives -num-bigint = { version = "^0.3", features = ["serde"] } -num = "0.3" +num = { version = "0.4", features = ["serde", "num-bigint"] } num-traits = "0.2" num-derive = "0.3" # Fixtures diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 30acb3baa..3fcbecaeb 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -60,7 +60,7 @@ pub struct Active { impl Campaign { /// Matches the Channel.leader to the Campaign.spec.leader /// If they match it returns `Some`, otherwise, it returns `None` - pub fn leader<'a>(&'a self) -> Option<&'a ValidatorDesc> { + pub fn leader(&self) -> Option<&'_ ValidatorDesc> { if self.channel.leader == self.validators.leader().id { Some(self.validators.leader()) } else { @@ -70,7 +70,7 @@ impl Campaign { /// Matches the Channel.follower to the Campaign.spec.follower /// If they match it returns `Some`, otherwise, it returns `None` - pub fn follower<'a>(&'a self) -> Option<&'a ValidatorDesc> { + pub fn follower(&self) -> Option<&'_ ValidatorDesc> { if self.channel.follower == self.validators.follower().id { Some(self.validators.follower()) } else { diff --git a/primitives/src/eth_checksum.rs b/primitives/src/eth_checksum.rs index a23d05d20..20c204e73 100644 --- a/primitives/src/eth_checksum.rs +++ b/primitives/src/eth_checksum.rs @@ -1,12 +1,16 @@ -use crypto::{digest::Digest, sha3::Sha3}; +use tiny_keccak::{Hasher, Keccak}; pub fn checksum(address: &str) -> String { let address = address.trim_start_matches("0x").to_lowercase(); let address_hash = { - let mut hasher = Sha3::keccak256(); - hasher.input(address.as_bytes()); - hasher.result_str() + let mut hasher = Keccak::v256(); + let mut result: [u8; 32] = [0; 32]; + + hasher.update(address.as_bytes()); + hasher.finalize(&mut result); + + hex::encode(result) }; address @@ -26,3 +30,23 @@ pub fn checksum(address: &str) -> String { acc }) } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn it_checksums() { + let expected_checksum = "0xce07CbB7e054514D590a0262C93070D838bFBA2e"; + + let non_checksummed = expected_checksum.to_lowercase(); 
+ + assert_eq!(expected_checksum, checksum(&non_checksummed)); + + let non_prefixed = non_checksummed + .strip_prefix("0x") + .expect("should have prefix"); + + assert_eq!(expected_checksum, checksum(&non_prefixed)) + } +} diff --git a/primitives/src/merkle_tree.rs b/primitives/src/merkle_tree.rs index 0788560f6..2bcac4a74 100644 --- a/primitives/src/merkle_tree.rs +++ b/primitives/src/merkle_tree.rs @@ -1,9 +1,7 @@ use merkletree::{hash::Algorithm, merkle, merkle::VecStore, proof::Proof}; -use std::fmt; -use std::hash::Hasher; -use std::iter::FromIterator; +use std::{fmt, iter::FromIterator}; use thiserror::Error; -use tiny_keccak::Keccak; +use tiny_keccak::{Hasher, Keccak}; #[derive(Clone)] struct KeccakAlgorithm(Keccak); @@ -16,7 +14,7 @@ impl fmt::Debug for KeccakAlgorithm { impl KeccakAlgorithm { pub fn new() -> KeccakAlgorithm { - KeccakAlgorithm(Keccak::new_keccak256()) + KeccakAlgorithm(Keccak::v256()) } } @@ -26,7 +24,7 @@ impl Default for KeccakAlgorithm { } } -impl Hasher for KeccakAlgorithm { +impl std::hash::Hasher for KeccakAlgorithm { #[inline] fn write(&mut self, msg: &[u8]) { self.0.update(msg) @@ -50,7 +48,7 @@ impl Algorithm for KeccakAlgorithm { #[inline] fn reset(&mut self) { - self.0 = Keccak::new_keccak256() + self.0 = Keccak::v256() } fn leaf(&mut self, leaf: MerkleItem) -> MerkleItem { @@ -58,6 +56,8 @@ impl Algorithm for KeccakAlgorithm { } fn node(&mut self, left: MerkleItem, right: MerkleItem, _height: usize) -> MerkleItem { + use std::hash::Hasher; + // This is a check for odd number of leaves items // left == right since the right is a duplicate of left // return the item unencoded as the JS impl diff --git a/validator_worker/Cargo.toml b/validator_worker/Cargo.toml index 0291e2a3c..8111aa359 100644 --- a/validator_worker/Cargo.toml +++ b/validator_worker/Cargo.toml @@ -13,7 +13,7 @@ path = "src/lib.rs" primitives = { path = "../primitives" } adapter = { version = "0.1", path = "../adapter" } chrono = { version = "0.4", features = ["serde"] } -num = "0.3" +num = "0.4" num-traits = "0.2" # To/From Hex hex = "0.4" From 98790b582d37aa2e69f357b1cf4c23f0d464ac29 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 16 Mar 2021 17:46:23 +0200 Subject: [PATCH 10/49] primitives - channel_v5 - `Nonce` & Channel `fn id()` --- Cargo.lock | 117 +++++++++++++++++++++++++++++++++-- primitives/Cargo.toml | 4 ++ primitives/src/channel_v5.rs | 111 ++++++++++++++++++++++++++++++++- 3 files changed, 223 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a669e5708..67d278194 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -407,7 +407,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" dependencies = [ "either", - "radium", + "radium 0.3.0", +] + +[[package]] +name = "bitvec" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f682656975d3a682daff957be4ddeb65d6ad656737cd821f2d00685ae466af1" +dependencies = [ + "funty", + "radium 0.6.2", + "tap", + "wyz", ] [[package]] @@ -539,6 +551,12 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0a5e3906bcbf133e33c1d4d95afc664ad37fbdb9f6568d8043e7ea8c27d93d3" +[[package]] +name = "byte-slice-cast" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65c1bf4a04a88c54f589125563643d773f3254b5c38571395e2b591c693bbc81" + [[package]] name = "byte-tools" version = "0.2.0" @@ -1099,6 +1117,19 
@@ dependencies = [ "tiny-keccak 2.0.2", ] +[[package]] +name = "ethbloom" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "779864b9c7f7ead1f092972c3257496c6a84b46dba2ce131dd8a282cb2cc5972" +dependencies = [ + "crunchy 0.2.2", + "fixed-hash 0.7.0", + "impl-rlp", + "impl-serde", + "tiny-keccak 2.0.2", +] + [[package]] name = "ethereum-types" version = "0.4.2" @@ -1123,7 +1154,21 @@ dependencies = [ "fixed-hash 0.7.0", "impl-rlp", "impl-serde", - "primitive-types", + "primitive-types 0.8.0", + "uint 0.9.0", +] + +[[package]] +name = "ethereum-types" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f64b5df66a228d85e4b17e5d6c6aa43b0310898ffe8a85988c4c032357aaabfd" +dependencies = [ + "ethbloom 0.11.0", + "fixed-hash 0.7.0", + "impl-rlp", + "impl-serde", + "primitive-types 0.9.0", "uint 0.9.0", ] @@ -1294,6 +1339,12 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "futures" version = "0.1.30" @@ -1772,7 +1823,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1be51a921b067b0eaca2fad532d9400041561aa922221cc65f95a85641c6bf53" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", +] + +[[package]] +name = "impl-codec" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +dependencies = [ + "parity-scale-codec 2.0.1", ] [[package]] @@ -2430,8 +2490,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79602888a81ace83e3d1d4b2873286c1f5f906c84db667594e8db8da3506c383" dependencies = [ "arrayvec 0.5.2", - "bitvec", - "byte-slice-cast", + "bitvec 0.17.4", + "byte-slice-cast 0.3.5", + "serde", +] + +[[package]] +name = "parity-scale-codec" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cd3dab59b5cf4bc81069ade0fc470341a1ef3ad5fa73e5a8943bed2ec12b2e8" +dependencies = [ + "arrayvec 0.5.2", + "bitvec 0.20.2", + "byte-slice-cast 1.0.0", "serde", ] @@ -2748,7 +2820,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3824ae2c5e27160113b9e029a10ec9e3f0237bad8029f69c7724393c9fdefd8" dependencies = [ "fixed-hash 0.7.0", - "impl-codec", + "impl-codec 0.4.2", + "impl-rlp", + "impl-serde", + "uint 0.9.0", +] + +[[package]] +name = "primitive-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +dependencies = [ + "fixed-hash 0.7.0", + "impl-codec 0.5.0", "impl-rlp", "impl-serde", "uint 0.9.0", @@ -2762,6 +2847,8 @@ dependencies = [ "bytes 1.0.1", "chrono", "cid", + "ethabi", + "ethereum-types 0.11.0", "fake", "futures 0.3.12", "hex", @@ -2865,6 +2952,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" +[[package]] +name = "radium" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + [[package]] name = "rand" version = "0.3.23" @@ -3843,6 +3936,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "tempdir" version = "0.3.7" @@ -4617,3 +4716,9 @@ dependencies = [ "winapi 0.2.8", "winapi-build", ] + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 136de67bd..7fa47099e 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -25,6 +25,10 @@ slog-async = "^2.3.0" thiserror = "^1.0" chrono = { version = "0.4", features = ["serde"] } time = "0.1.42" +# For encoding the Channel to a ChannelId +ethabi = "13.0.0" +# For the nonce U256 +ethereum-types = "0.11" # Macro for easier derive of Display & FromStr parse-display = "^0.4.1" # CID & multihash / multibase diff --git a/primitives/src/channel_v5.rs b/primitives/src/channel_v5.rs index dbdc234a9..9c6ba6259 100644 --- a/primitives/src/channel_v5.rs +++ b/primitives/src/channel_v5.rs @@ -1,15 +1,120 @@ +use ethereum_types::U256; use serde::{Deserialize, Serialize}; +use std::fmt; + +use crate::{Address, ChannelId, ValidatorId}; -use crate::{BigNum, ChannelId, ValidatorId, Address}; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Channel { - pub id: ChannelId, pub leader: ValidatorId, pub follower: ValidatorId, pub guardian: Address, pub token: Address, - pub nonce: BigNum, + pub nonce: Nonce, +} + +impl Channel { + pub fn id(&self) -> ChannelId { + use ethabi::{encode, Token}; + use tiny_keccak::{Hasher, Keccak}; + + let tokens = [ + Token::Address(self.leader.as_bytes().into()), + Token::Address(self.follower.as_bytes().into()), + Token::Address(self.guardian.as_bytes().into()), + Token::Address(self.token.as_bytes().into()), + Token::FixedBytes(self.nonce.to_bytes().to_vec()), + ]; + + let mut channel_id = [0_u8; 32]; + let mut hasher = Keccak::v256(); + hasher.update(&encode(&tokens)); + hasher.finalize(&mut channel_id); + + ChannelId::from(channel_id) + } +} + +/// The nonce is an Unsigned 256 number +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct Nonce(pub U256); + +impl Nonce { + /// In Big-Endian + pub fn to_bytes(&self) -> [u8; 32] { + // the impl of From uses BigEndian + self.0.into() + } +} + +impl fmt::Display for Nonce { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0.to_string()) + } +} + +impl fmt::Debug for Nonce { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Nonce({})", self.0.to_string()) + } +} + +impl From for Nonce { + fn from(value: u64) -> Self { + Self(U256::from(value)) + } +} + +impl From for Nonce { + fn from(value: u32) -> Self { + Self(U256::from(value)) + } +} + +// The U256 implementation deserializes the value from a hex String value with a prefix `0x...` +// This is why we we need to impl it our selves +impl<'de> Deserialize<'de> for Nonce { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let string = String::deserialize(deserializer)?; 
+ + U256::from_dec_str(&string) + .map_err(serde::de::Error::custom) + .map(Nonce) + } +} + +// The U256 implementation serializes the value as a hex String value with a prefix `0x...` +// This is why we we need to impl it our selves +impl Serialize for Nonce { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.0.to_string().serialize(serializer) + } +} +#[cfg(test)] +mod test { + use super::*; + use serde_json::{from_value, to_value, Value}; + + #[test] + fn de_serializes_nonce() { + let nonce_str = "12345"; + let json = Value::String(nonce_str.into()); + + let nonce: Nonce = from_value(json.clone()).expect("Should deserialize a Nonce"); + let expected_nonce = Nonce::from(12345_u64); + + assert_eq!(&expected_nonce, &nonce); + assert_eq!(json, to_value(nonce).expect("Should serialize a Nonce")); + assert_eq!(nonce_str, &nonce.to_string()); + assert_eq!("Nonce(12345)", &format!("{:?}", nonce)); + } } // TODO: Postgres Channel From 7ea437b5b1c7503a91af19b1d072e6d4b8068a2d Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 16 Mar 2021 17:46:51 +0200 Subject: [PATCH 11/49] primitives - ValidatorId to_address & as_bytes --- primitives/src/validator.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index 75fdafa47..158024067 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -16,6 +16,14 @@ impl fmt::Debug for ValidatorId { } impl ValidatorId { + pub fn as_bytes(&self) -> &[u8; 20] { + self.0.as_bytes() + } + + pub fn to_address(&self) -> Address { + self.0 + } + pub fn inner(&self) -> &[u8; 20] { &self.0.as_bytes() } From 564ba9c94b39a8fe3bf79f53b810a8a77ba7b6e1 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 16 Mar 2021 17:49:15 +0200 Subject: [PATCH 12/49] primtivies campaign::Active - fix `to` name --- primitives/src/campaign.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 3fcbecaeb..94c4b8630 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -54,7 +54,7 @@ pub struct Active { /// A sane value should be lower than channel.validUntil * 1000 and higher than created /// It's recommended to set this at least one month prior to channel.validUntil * 1000 #[serde(with = "ts_milliseconds")] - pub active_to: DateTime, + pub to: DateTime, } impl Campaign { From 3f8ec0559229f9ebff1fb50cec797809f622886c Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 17 Mar 2021 17:14:39 +0200 Subject: [PATCH 13/49] primitives - UnifiedNum --- primitives/src/big_num.rs | 17 +-- primitives/src/lib.rs | 11 +- primitives/src/unified_num.rs | 218 ++++++++++++++++++++++++++++++++++ 3 files changed, 234 insertions(+), 12 deletions(-) create mode 100644 primitives/src/unified_num.rs diff --git a/primitives/src/big_num.rs b/primitives/src/big_num.rs index 38d87d613..c67dcdeff 100644 --- a/primitives/src/big_num.rs +++ b/primitives/src/big_num.rs @@ -1,11 +1,12 @@ -use std::convert::TryFrom; -use std::fmt; -use std::iter::Sum; -use std::ops::{Add, AddAssign, Div, Mul, Sub}; -use std::str::FromStr; - -use num::rational::Ratio; -use num::{BigUint, CheckedSub, Integer}; +use std::{ + convert::TryFrom, + fmt, + iter::Sum, + ops::{Add, AddAssign, Div, Mul, Sub}, + str::FromStr, +}; + +use num::{rational::Ratio, BigUint, CheckedSub, Integer}; use num_derive::{Num, NumOps, One, Zero}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; diff --git 
a/primitives/src/lib.rs b/primitives/src/lib.rs index 4e202f2ca..66b9c9641 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -1,5 +1,7 @@ #![deny(rust_2018_idioms)] #![deny(clippy::all)] +use std::{error, fmt}; + pub use self::{ ad_slot::AdSlot, ad_unit::AdUnit, @@ -10,14 +12,15 @@ pub use self::{ config::Config, event_submission::EventSubmission, ipfs::IPFS, + unified_num::UnifiedNum, validator::{ValidatorDesc, ValidatorId}, }; -use std::{error, fmt}; mod ad_slot; mod ad_unit; pub mod adapter; pub mod address; +pub mod analytics; pub mod balances_map; pub mod big_num; pub mod campaign; @@ -25,6 +28,7 @@ pub mod channel; pub mod channel_v5; pub mod channel_validator; pub mod config; +mod eth_checksum; pub mod event_submission; pub mod ipfs; pub mod market; @@ -32,6 +36,8 @@ pub mod merkle_tree; pub mod sentry; pub mod supermarket; pub mod targeting; +mod unified_num; +pub mod validator; pub mod util { pub use api::ApiUrl; @@ -52,9 +58,6 @@ pub mod util { pub mod logging; } -pub mod analytics; -mod eth_checksum; -pub mod validator; #[derive(Debug, PartialEq, Eq)] pub enum DomainError { diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs new file mode 100644 index 000000000..56a404f80 --- /dev/null +++ b/primitives/src/unified_num.rs @@ -0,0 +1,218 @@ +use num::{CheckedSub, Integer, One}; +use num_derive::{Num, NumOps, Zero}; +use std::{ + fmt, + iter::Sum, + ops::{Add, AddAssign, Div, Mul, Sub}, +}; + +use crate::BigNum; + +/// Unified precision Number with precision 8 +#[derive(Num, NumOps, Zero, Default, PartialEq, Eq, PartialOrd, Ord)] +pub struct UnifiedNum(BigNum); + +impl UnifiedNum { + pub const PRECISION: usize = 8; + + pub fn div_floor(&self, other: &Self) -> Self { + Self(self.0.div_floor(&other.0)) + } + + pub fn to_f64(&self) -> Option { + self.0.to_f64() + } + + pub fn to_u64(&self) -> Option { + self.0.to_u64() + } +} + +impl From for UnifiedNum { + fn from(number: u64) -> Self { + Self(BigNum::from(number)) + } +} + +impl From for UnifiedNum { + fn from(number: BigNum) -> Self { + Self(number) + } +} + +impl fmt::Display for UnifiedNum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut string_value = self.0.to_str_radix(10); + let value_length = string_value.len(); + + if value_length > Self::PRECISION { + string_value.insert_str(value_length - Self::PRECISION, "."); + + f.write_str(&string_value) + } else { + write!(f, "0.{:0>8}", string_value) + } + } +} + +impl fmt::Debug for UnifiedNum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "UnifiedNum({})", self.to_string()) + } +} + +impl One for UnifiedNum { + fn one() -> Self { + Self(BigNum::from(10_000_000)) + } +} + +impl Integer for UnifiedNum { + fn div_floor(&self, other: &Self) -> Self { + self.0.div_floor(&other.0).into() + } + + fn mod_floor(&self, other: &Self) -> Self { + self.0.mod_floor(&other.0).into() + } + + fn gcd(&self, other: &Self) -> Self { + self.0.gcd(&other.0).into() + } + + fn lcm(&self, other: &Self) -> Self { + self.0.lcm(&other.0).into() + } + + fn divides(&self, other: &Self) -> bool { + self.0.divides(&other.0) + } + + fn is_multiple_of(&self, other: &Self) -> bool { + self.0.is_multiple_of(&other.0) + } + + fn is_even(&self) -> bool { + self.0.is_even() + } + + fn is_odd(&self) -> bool { + !self.is_even() + } + + fn div_rem(&self, other: &Self) -> (Self, Self) { + let (quotient, remainder) = self.0.div_rem(&other.0); + + (quotient.into(), remainder.into()) + } +} + +impl Add<&UnifiedNum> for &UnifiedNum { + 
type Output = UnifiedNum; + + fn add(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 + &rhs.0; + UnifiedNum(bignum) + } +} + +impl AddAssign<&UnifiedNum> for UnifiedNum { + fn add_assign(&mut self, rhs: &UnifiedNum) { + self.0 += &rhs.0 + } +} + +impl Sub<&UnifiedNum> for &UnifiedNum { + type Output = UnifiedNum; + + fn sub(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 - &rhs.0; + UnifiedNum(bignum) + } +} + +impl Sub<&UnifiedNum> for UnifiedNum { + type Output = UnifiedNum; + + fn sub(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 - &rhs.0; + UnifiedNum(bignum) + } +} + +impl Sub for &UnifiedNum { + type Output = UnifiedNum; + + fn sub(self, rhs: UnifiedNum) -> Self::Output { + let bignum = &self.0 - &rhs.0; + UnifiedNum(bignum) + } +} + +impl Div<&UnifiedNum> for &UnifiedNum { + type Output = UnifiedNum; + + fn div(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 / &rhs.0; + UnifiedNum(bignum) + } +} + +impl Div<&UnifiedNum> for UnifiedNum { + type Output = UnifiedNum; + + fn div(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 / &rhs.0; + UnifiedNum(bignum) + } +} + +impl Mul<&UnifiedNum> for &UnifiedNum { + type Output = UnifiedNum; + + fn mul(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 * &rhs.0; + UnifiedNum(bignum) + } +} + +impl Mul<&UnifiedNum> for UnifiedNum { + type Output = UnifiedNum; + + fn mul(self, rhs: &UnifiedNum) -> Self::Output { + let bignum = &self.0 * &rhs.0; + UnifiedNum(bignum) + } +} + +impl<'a> Sum<&'a UnifiedNum> for UnifiedNum { + fn sum>(iter: I) -> Self { + let sum_uint = iter.map(|big_num| &big_num.0).sum(); + + Self(sum_uint) + } +} + +impl CheckedSub for UnifiedNum { + fn checked_sub(&self, v: &Self) -> Option { + self.0.checked_sub(&v.0).map(Self) + } +} + +#[cfg(test)] +mod test { + use crate::UnifiedNum; + + #[test] + fn unified_num_displays_correctly() { + let one = UnifiedNum::from(100_000_000); + let zero_point_one = UnifiedNum::from(10_000_000); + let smallest_value = UnifiedNum::from(1); + let random_value = UnifiedNum::from(144_903_000_567_000); + + assert_eq!("1.00000000", &one.to_string()); + assert_eq!("0.10000000", &zero_point_one.to_string()); + assert_eq!("0.00000001", &smallest_value.to_string()); + assert_eq!("1449030.00567000", &random_value.to_string()); + } +} From be4734241bf8ceca3966302889aca5673542856a Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 18 Mar 2021 13:20:30 +0200 Subject: [PATCH 14/49] pirmitives - BigNum - impl num::pow::Pow --- primitives/src/big_num.rs | 42 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/primitives/src/big_num.rs b/primitives/src/big_num.rs index c67dcdeff..e73c3b1f3 100644 --- a/primitives/src/big_num.rs +++ b/primitives/src/big_num.rs @@ -6,7 +6,7 @@ use std::{ str::FromStr, }; -use num::{rational::Ratio, BigUint, CheckedSub, Integer}; +use num::{pow::Pow, rational::Ratio, BigUint, CheckedSub, Integer}; use num_derive::{Num, NumOps, One, Zero}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; @@ -99,6 +99,46 @@ impl Integer for BigNum { } } +impl Pow for BigNum { + type Output = BigNum; + + fn pow(self, rhs: BigNum) -> Self::Output { + Self(self.0.pow(rhs.0)) + } +} + +impl Pow<&BigNum> for BigNum { + type Output = BigNum; + + fn pow(self, rhs: &BigNum) -> Self::Output { + BigNum(self.0.pow(&rhs.0)) + } +} + +impl Pow for &BigNum { + type Output = BigNum; + + fn pow(self, rhs: BigNum) -> Self::Output { + 
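+        // `Pow` is also implemented for `&BigUint`, so the base is raised by
+        // reference while only the exponent's inner `BigUint` is moved.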
BigNum(Pow::pow(&self.0, rhs.0)) + } +} + +impl Pow<&BigNum> for &BigNum { + type Output = BigNum; + + fn pow(self, rhs: &BigNum) -> Self::Output { + BigNum(Pow::pow(&self.0, &rhs.0)) + } +} + +impl Pow for BigNum { + type Output = BigNum; + + fn pow(self, rhs: u8) -> Self::Output { + BigNum(self.0.pow(rhs)) + } +} + impl Add<&BigNum> for &BigNum { type Output = BigNum; From 080b5177680a6e9f1c11d3d3130f63cac8b12300 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 18 Mar 2021 13:21:30 +0200 Subject: [PATCH 15/49] primitives - UnifiedNum - impl num::pow::Pow & to_preicison() --- primitives/src/unified_num.rs | 86 +++++++++++++++++++++++++++++++++-- 1 file changed, 81 insertions(+), 5 deletions(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index 56a404f80..433f12571 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -1,6 +1,7 @@ -use num::{CheckedSub, Integer, One}; +use num::{pow::Pow, CheckedSub, Integer, One}; use num_derive::{Num, NumOps, Zero}; use std::{ + cmp::Ordering, fmt, iter::Sum, ops::{Add, AddAssign, Div, Mul, Sub}, @@ -13,7 +14,7 @@ use crate::BigNum; pub struct UnifiedNum(BigNum); impl UnifiedNum { - pub const PRECISION: usize = 8; + pub const PRECISION: u8 = 8; pub fn div_floor(&self, other: &Self) -> Self { Self(self.0.div_floor(&other.0)) @@ -26,6 +27,17 @@ impl UnifiedNum { pub fn to_u64(&self) -> Option { self.0.to_u64() } + + /// Transform the UnifiedNum precision 8 to a new precision + pub fn to_precision(&self, precision: u8) -> BigNum { + match precision.cmp(&Self::PRECISION) { + Ordering::Equal => self.0.clone(), + Ordering::Less => self + .0 + .div_floor(&BigNum::from(10).pow(Self::PRECISION - precision)), + Ordering::Greater => (&self.0).mul(&BigNum::from(10).pow(precision - Self::PRECISION)), + } + } } impl From for UnifiedNum { @@ -44,9 +56,10 @@ impl fmt::Display for UnifiedNum { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut string_value = self.0.to_str_radix(10); let value_length = string_value.len(); + let precision: usize = Self::PRECISION.into(); - if value_length > Self::PRECISION { - string_value.insert_str(value_length - Self::PRECISION, "."); + if value_length > precision { + string_value.insert_str(value_length - precision, "."); f.write_str(&string_value) } else { @@ -107,6 +120,38 @@ impl Integer for UnifiedNum { } } +impl Pow for UnifiedNum { + type Output = UnifiedNum; + + fn pow(self, rhs: UnifiedNum) -> Self::Output { + Self(self.0.pow(rhs.0)) + } +} + +impl Pow<&UnifiedNum> for UnifiedNum { + type Output = UnifiedNum; + + fn pow(self, rhs: &UnifiedNum) -> Self::Output { + UnifiedNum(self.0.pow(&rhs.0)) + } +} + +impl Pow for &UnifiedNum { + type Output = UnifiedNum; + + fn pow(self, rhs: UnifiedNum) -> Self::Output { + UnifiedNum((&self.0).pow(rhs.0)) + } +} + +impl Pow<&UnifiedNum> for &UnifiedNum { + type Output = UnifiedNum; + + fn pow(self, rhs: &UnifiedNum) -> Self::Output { + UnifiedNum((&self.0).pow(&rhs.0)) + } +} + impl Add<&UnifiedNum> for &UnifiedNum { type Output = UnifiedNum; @@ -201,7 +246,7 @@ impl CheckedSub for UnifiedNum { #[cfg(test)] mod test { - use crate::UnifiedNum; + use super::*; #[test] fn unified_num_displays_correctly() { @@ -215,4 +260,35 @@ mod test { assert_eq!("0.00000001", &smallest_value.to_string()); assert_eq!("1449030.00567000", &random_value.to_string()); } + + #[test] + fn test_convert_unified_num_to_new_precision() { + let dai_precision: u8 = 18; + let usdt_precision: u8 = 6; + let same_precision = 
UnifiedNum::PRECISION; + + let dai_power = BigNum::from(10).pow(BigNum::from(dai_precision as u64)); + + // 321.00000000 + let dai_unified = UnifiedNum::from(32_100_000_000_u64); + let dai_expected = BigNum::from(321_u64) * dai_power; + assert_eq!(dai_expected, dai_unified.to_precision(dai_precision)); + + // 321.00000777 - should floor to 321.000007 (precision 6) + let usdt_unified = UnifiedNum::from(32_100_000_777_u64); + let usdt_expected = BigNum::from(321_000_007_u64); + assert_eq!( + usdt_expected, + usdt_unified.to_precision(usdt_precision), + "It should floor the result of USDT" + ); + + // 321.00000999 + let same_unified = UnifiedNum::from(32_100_000_777_u64); + assert_eq!( + same_unified.0, + same_unified.to_precision(same_precision), + "It should not make any adjustments to the precision" + ); + } } From 9815a2417010d38213032ebdae0dc304c8891c5e Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 18 Mar 2021 13:30:37 +0200 Subject: [PATCH 16/49] primitives - UnifiedNum - test for Zero & One --- primitives/src/unified_num.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index 433f12571..ef2797c0b 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -76,7 +76,7 @@ impl fmt::Debug for UnifiedNum { impl One for UnifiedNum { fn one() -> Self { - Self(BigNum::from(10_000_000)) + Self(BigNum::from(100_000_000)) } } @@ -247,6 +247,7 @@ impl CheckedSub for UnifiedNum { #[cfg(test)] mod test { use super::*; + use num::Zero; #[test] fn unified_num_displays_correctly() { @@ -256,6 +257,8 @@ mod test { let random_value = UnifiedNum::from(144_903_000_567_000); assert_eq!("1.00000000", &one.to_string()); + assert_eq!("1.00000000", &UnifiedNum::one().to_string()); + assert_eq!("0.00000000", &UnifiedNum::zero().to_string()); assert_eq!("0.10000000", &zero_point_one.to_string()); assert_eq!("0.00000001", &smallest_value.to_string()); assert_eq!("1449030.00567000", &random_value.to_string()); From 055307ca32e7de1a6cb79d09c560f4291de4cbbe Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 18 Mar 2021 15:08:54 +0200 Subject: [PATCH 17/49] primitives - BigNum - impl Display instead of ToString --- primitives/src/big_num.rs | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/primitives/src/big_num.rs b/primitives/src/big_num.rs index e73c3b1f3..c9e28bea5 100644 --- a/primitives/src/big_num.rs +++ b/primitives/src/big_num.rs @@ -59,6 +59,12 @@ impl fmt::Debug for BigNum { } } +impl fmt::Display for BigNum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + impl Integer for BigNum { fn div_floor(&self, other: &Self) -> Self { self.0.div_floor(&other.0).into() @@ -250,12 +256,6 @@ impl FromStr for BigNum { } } -impl ToString for BigNum { - fn to_string(&self) -> String { - self.0.to_str_radix(10) - } -} - impl From for BigNum { fn from(value: u64) -> Self { Self(BigUint::from(value)) @@ -338,4 +338,11 @@ mod test { let expected: BigNum = 11.into(); assert_eq!(expected, &big_num * &ratio); } + #[test] + fn bignum_formatting() { + let bignum: BigNum = 5000.into(); + + assert_eq!("5000", &bignum.to_string()); + assert_eq!("BigNum(radix: 10; 5000)", &format!("{:?}", &bignum)); + } } From 0804b77b1028d3fe42e1ee6b29a43ae1a8b21129 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 18 Mar 2021 16:36:47 +0200 Subject: [PATCH 18/49] primitives - ValidatorMessage with a Generic --- primitives/src/sentry.rs | 121 
++++++++++++++----------- primitives/src/validator.rs | 140 ++++++++++++++++++++++++++++- sentry/src/db/event_aggregate.rs | 40 +++++---- sentry/src/db/validator_message.rs | 3 +- sentry/src/middleware/auth.rs | 2 +- sentry/src/routes/channel.rs | 8 +- validator_worker/src/follower.rs | 5 +- 7 files changed, 235 insertions(+), 84 deletions(-) diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index f04c7cc3e..6489e6263 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -1,40 +1,66 @@ -use crate::targeting::Rules; -use crate::validator::MessageTypes; -use crate::{BigNum, Channel, ChannelId, ValidatorId}; +use crate::{ + targeting::Rules, + validator::Type as MessageType, + validator::{ApproveState, Heartbeat, MessageTypes, NewState}, + BigNum, Channel, ChannelId, ValidatorId, +}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; -use std::fmt; -use std::hash::Hash; +use std::{collections::HashMap, fmt, hash::Hash}; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct LastApproved { /// NewState can be None if the channel is brand new - pub new_state: Option, + pub new_state: Option>, /// ApproveState can be None if the channel is brand new - pub approve_state: Option, + pub approve_state: Option>, } #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct NewStateValidatorMessage { +pub struct MessageResponse { pub from: ValidatorId, pub received: DateTime, - pub msg: MessageTypes, + pub msg: message::Message, } -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct ApproveStateValidatorMessage { - pub from: ValidatorId, - pub received: DateTime, - pub msg: MessageTypes, -} +pub mod message { + use std::{convert::TryFrom, ops::Deref}; -#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] -pub struct HeartbeatValidatorMessage { - pub from: ValidatorId, - pub received: DateTime, - pub msg: MessageTypes, + use crate::validator::messages::*; + use serde::{Deserialize, Serialize}; + + #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] + #[serde(try_from = "MessageTypes", into = "MessageTypes")] + pub struct Message(T); + + impl Message { + pub fn into_inner(self) -> T { + self.0 + } + } + + impl Deref for Message { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + impl TryFrom for Message { + type Error = MessageTypeError; + + fn try_from(value: MessageTypes) -> Result { + >::try_from(value).map(Self) + } + } + + impl Into for Message { + fn into(self) -> MessageTypes { + self.0.into() + } + } } #[serde(tag = "type", rename_all = "SCREAMING_SNAKE_CASE")] @@ -119,7 +145,7 @@ pub struct LastApprovedResponse { /// None -> withHeartbeat=true wasn't passed /// Some(vec![]) (empty vec) or Some(heartbeats) - withHeartbeat=true was passed #[serde(default, skip_serializing_if = "Option::is_none")] - pub heartbeats: Option>, + pub heartbeats: Option>>, } #[derive(Serialize, Deserialize, Debug)] @@ -232,16 +258,16 @@ pub mod channel_list { #[cfg(feature = "postgres")] mod postgres { - use super::{ - ApproveStateValidatorMessage, HeartbeatValidatorMessage, NewStateValidatorMessage, - ValidatorMessage, + use super::{MessageResponse, ValidatorMessage}; + use crate::{ + sentry::EventAggregate, + validator::{messages::Type as MessageType, MessageTypes}, }; - use crate::sentry::EventAggregate; - use crate::validator::MessageTypes; use bytes::BytesMut; use postgres_types::{accepts, to_sql_checked, IsNull, Json, 
ToSql, Type}; - use std::error::Error; - use tokio_postgres::Row; + use serde::Deserialize; + use std::convert::TryFrom; + use tokio_postgres::{Error, Row}; impl From<&Row> for EventAggregate { fn from(row: &Row) -> Self { @@ -263,33 +289,20 @@ mod postgres { } } - impl From<&Row> for ApproveStateValidatorMessage { - fn from(row: &Row) -> Self { - Self { - from: row.get("from"), - received: row.get("received"), - msg: row.get::<_, Json>("msg").0, - } - } - } + impl TryFrom<&Row> for MessageResponse + where + T: MessageType, + for<'de> T: Deserialize<'de>, + { + type Error = Error; - impl From<&Row> for NewStateValidatorMessage { - fn from(row: &Row) -> Self { - Self { + fn try_from(row: &Row) -> Result { + Ok(Self { from: row.get("from"), received: row.get("received"), - msg: row.get::<_, Json>("msg").0, - } - } - } - - impl From<&Row> for HeartbeatValidatorMessage { - fn from(row: &Row) -> Self { - Self { - from: row.get("from"), - received: row.get("received"), - msg: row.get::<_, Json>("msg").0, - } + // guard against mistakes from wrong Queries + msg: row.try_get::<_, Json<_>>("msg")?.0, + }) } } @@ -298,7 +311,7 @@ mod postgres { &self, ty: &Type, w: &mut BytesMut, - ) -> Result> { + ) -> Result> { Json(self).to_sql(ty, w) } diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index 158024067..91ef256b4 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -100,11 +100,149 @@ pub struct ValidatorDesc { // Validator Message Types -mod messages { +pub mod messages { + use std::{any::type_name, convert::TryFrom, fmt, marker::PhantomData}; + use thiserror::Error; + use crate::BalancesMap; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; + #[derive(Error, Debug)] + pub struct MessageTypeError { + expected: PhantomData, + actual: String, + } + + impl MessageTypeError { + pub fn for_actual(_actual: &A) -> Self { + Self { + expected: PhantomData::default(), + actual: type_name::().to_string(), + } + } + } + + impl fmt::Display for MessageTypeError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Expected {} message type but the actual is {}", + type_name::(), + self.actual + ) + } + } + + pub trait Type: + fmt::Debug + + Into + + TryFrom> + + Clone + + PartialEq + + Eq + { + } + + impl Type for Accounting {} + impl TryFrom for Accounting { + type Error = MessageTypeError; + + fn try_from(value: MessageTypes) -> Result { + match value { + MessageTypes::ApproveState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::NewState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::RejectState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Heartbeat(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Accounting(accounting) => Ok(accounting), + } + } + } + impl Into for Accounting { + fn into(self) -> MessageTypes { + MessageTypes::Accounting(self) + } + } + + impl Type for ApproveState {} + impl TryFrom for ApproveState { + type Error = MessageTypeError; + + fn try_from(value: MessageTypes) -> Result { + match value { + MessageTypes::NewState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::RejectState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Heartbeat(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Accounting(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::ApproveState(approve_state) => Ok(approve_state), + } + } + } + impl Into for ApproveState { + fn into(self) -> MessageTypes { 
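+            // Mirrors the `TryFrom<MessageTypes>` conversion above: the concrete
+            // `ApproveState` is wrapped back into the untyped `MessageTypes` enum.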
+ MessageTypes::ApproveState(self) + } + } + + impl Type for NewState {} + impl TryFrom for NewState { + type Error = MessageTypeError; + + fn try_from(value: MessageTypes) -> Result { + match value { + MessageTypes::ApproveState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::RejectState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Heartbeat(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Accounting(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::NewState(new_state) => Ok(new_state), + } + } + } + impl Into for NewState { + fn into(self) -> MessageTypes { + MessageTypes::NewState(self) + } + } + + impl Type for RejectState {} + impl TryFrom for RejectState { + type Error = MessageTypeError; + + fn try_from(value: MessageTypes) -> Result { + match value { + MessageTypes::ApproveState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::NewState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Heartbeat(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Accounting(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::RejectState(reject_state) => Ok(reject_state), + } + } + } + impl Into for RejectState { + fn into(self) -> MessageTypes { + MessageTypes::RejectState(self) + } + } + + impl Type for Heartbeat {} + impl TryFrom for Heartbeat { + type Error = MessageTypeError; + + fn try_from(value: MessageTypes) -> Result { + match value { + MessageTypes::ApproveState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::NewState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::RejectState(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Accounting(msg) => Err(MessageTypeError::for_actual(&msg)), + MessageTypes::Heartbeat(heartbeat) => Ok(heartbeat), + } + } + } + impl Into for Heartbeat { + fn into(self) -> MessageTypes { + MessageTypes::Heartbeat(self) + } + } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct Accounting { diff --git a/sentry/src/db/event_aggregate.rs b/sentry/src/db/event_aggregate.rs index b0de773a0..faf55e4d5 100644 --- a/sentry/src/db/event_aggregate.rs +++ b/sentry/src/db/event_aggregate.rs @@ -1,22 +1,23 @@ use crate::db::DbPool; use bb8::RunError; -use bb8_postgres::tokio_postgres::binary_copy::BinaryCopyInWriter; -use bb8_postgres::tokio_postgres::types::{ToSql, Type}; -use bb8_postgres::tokio_postgres::Error; +use bb8_postgres::tokio_postgres::{ + binary_copy::BinaryCopyInWriter, + types::{ToSql, Type}, + Error, +}; use chrono::{DateTime, Utc}; use futures::pin_mut; -use primitives::sentry::{ - ApproveStateValidatorMessage, EventAggregate, HeartbeatValidatorMessage, - NewStateValidatorMessage, +use primitives::{ + sentry::{EventAggregate, MessageResponse}, + validator::{ApproveState, Heartbeat, NewState}, + BigNum, Channel, ChannelId, ValidatorId, }; -use primitives::BigNum; -use primitives::{Channel, ChannelId, ValidatorId}; -use std::ops::Add; +use std::{convert::TryFrom, ops::Add}; pub async fn latest_approve_state( pool: &DbPool, channel: &Channel, -) -> Result, RunError> { +) -> Result>, RunError> { let connection = pool.get().await?; let select = connection.prepare("SELECT \"from\", msg, received FROM validator_messages WHERE channel_id = $1 AND \"from\" = $2 AND msg ->> 'type' = 'ApproveState' ORDER BY received DESC LIMIT 1").await?; @@ -27,14 +28,17 @@ pub async fn latest_approve_state( ) .await?; - 
Ok(rows.get(0).map(ApproveStateValidatorMessage::from)) + rows.get(0) + .map(MessageResponse::::try_from) + .transpose() + .map_err(RunError::User) } pub async fn latest_new_state( pool: &DbPool, channel: &Channel, state_root: &str, -) -> Result, RunError> { +) -> Result>, RunError> { let connection = pool.get().await?; let select = connection.prepare("SELECT \"from\", msg, received FROM validator_messages WHERE channel_id = $1 AND \"from\" = $2 AND msg ->> 'type' = 'NewState' AND msg->> 'stateRoot' = $3 ORDER BY received DESC LIMIT 1").await?; @@ -49,14 +53,17 @@ pub async fn latest_new_state( ) .await?; - Ok(rows.get(0).map(NewStateValidatorMessage::from)) + rows.get(0) + .map(MessageResponse::::try_from) + .transpose() + .map_err(RunError::User) } pub async fn latest_heartbeats( pool: &DbPool, channel_id: &ChannelId, validator_id: &ValidatorId, -) -> Result, RunError> { +) -> Result>, RunError> { let connection = pool.get().await?; let select = connection.prepare("SELECT \"from\", msg, received FROM validator_messages WHERE channel_id = $1 AND \"from\" = $2 AND msg ->> 'type' = 'Heartbeat' ORDER BY received DESC LIMIT 2").await?; @@ -64,7 +71,10 @@ pub async fn latest_heartbeats( .query(&select, &[&channel_id, &validator_id]) .await?; - Ok(rows.iter().map(HeartbeatValidatorMessage::from).collect()) + rows.iter() + .map(MessageResponse::::try_from) + .collect::>() + .map_err(RunError::User) } pub async fn list_event_aggregates( diff --git a/sentry/src/db/validator_message.rs b/sentry/src/db/validator_message.rs index c8e8eb487..ecc4c467a 100644 --- a/sentry/src/db/validator_message.rs +++ b/sentry/src/db/validator_message.rs @@ -1,8 +1,7 @@ use crate::db::DbPool; use bb8::RunError; use bb8_postgres::tokio_postgres::types::ToSql; -use primitives::sentry::ValidatorMessage; -use primitives::{ChannelId, ValidatorId}; +use primitives::{sentry::ValidatorMessage, ChannelId, ValidatorId}; pub async fn get_validator_messages( pool: &DbPool, diff --git a/sentry/src/middleware/auth.rs b/sentry/src/middleware/auth.rs index 82c9b6548..ce404e98a 100644 --- a/sentry/src/middleware/auth.rs +++ b/sentry/src/middleware/auth.rs @@ -133,7 +133,7 @@ mod test { use primitives::util::tests::prep_db::{AUTH, IDS}; - use primitives::{config::configuration}; + use primitives::config::configuration; use deadpool::managed::Object; diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index a940abbb2..e5178d5dd 100644 --- a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -138,13 +138,7 @@ pub async fn last_approved( None => return Ok(default_response), }; - let state_root = match approve_state.msg.clone() { - MessageTypes::ApproveState(approve_state) => approve_state.state_root, - _ => { - error!(&app.logger, "{}", "failed to retrieve approved"; "module" => "last_approved"); - return Err(ResponseError::BadRequest("an error occurred".to_string())); - } - }; + let state_root = approve_state.msg.state_root.clone(); let new_state = latest_new_state(&app.pool, &channel, &state_root).await?; if new_state.is_none() { diff --git a/validator_worker/src/follower.rs b/validator_worker/src/follower.rs index a04ea2dc7..7d200deeb 100644 --- a/validator_worker/src/follower.rs +++ b/validator_worker/src/follower.rs @@ -121,10 +121,7 @@ async fn on_new_state<'a, A: Adapter + 'static>( .last_approved .and_then(|last_approved| last_approved.new_state) { - Some(new_state) => match new_state.msg { - MessageTypes::NewState(new_state) => new_state.balances, - _ => Default::default(), - }, + 
Some(new_state) => new_state.msg.into_inner().balances, _ => Default::default(), }; From a086b9ccc6dfebd768942e0e84bac45e4c107524 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 19 Mar 2021 11:07:17 +0200 Subject: [PATCH 19/49] primitives - impl FromStr for ValidatorId --- primitives/src/validator.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index 91ef256b4..dd0a4baaf 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use std::{convert::TryFrom, fmt}; +use std::{convert::TryFrom, fmt, str::FromStr}; use crate::{targeting::Value, Address, BigNum, DomainError, ToETHChecksum, ToHex}; @@ -57,6 +57,14 @@ impl AsRef<[u8]> for ValidatorId { } } +impl FromStr for ValidatorId { + type Err = crate::address::Error; + + fn from_str(s: &str) -> Result { + Address::try_from(s).map(Self) + } +} + impl TryFrom<&str> for ValidatorId { type Error = DomainError; fn try_from(value: &str) -> Result { @@ -98,8 +106,7 @@ pub struct ValidatorDesc { pub fee: BigNum, } -// Validator Message Types - +/// Validator Message Types pub mod messages { use std::{any::type_name, convert::TryFrom, fmt, marker::PhantomData}; use thiserror::Error; From 6f149b28f36331217c45fb2190b8cd760fc5dfdb Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 19 Mar 2021 11:07:35 +0200 Subject: [PATCH 20/49] primitives - sentry - MessageResponse test --- primitives/src/sentry.rs | 44 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 6489e6263..b2deb9835 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -35,6 +35,10 @@ pub mod message { pub struct Message(T); impl Message { + pub fn new(message: T) -> Self { + Self(message) + } + pub fn into_inner(self) -> T { self.0 } @@ -61,6 +65,46 @@ pub mod message { self.0.into() } } + + #[cfg(test)] + mod test { + use super::*; + use crate::sentry::MessageResponse; + use chrono::{TimeZone, Utc}; + use serde_json::{from_value, json, to_value}; + + #[test] + fn de_serialization_of_a_message() { + let approve_state_message = json!({ + "from":"0x2892f6C41E0718eeeDd49D98D648C789668cA67d", + "msg": { + "type":"ApproveState", + "stateRoot":"4739522efc1e81499541621759dadb331eaf08829d6a3851b4b654dfaddc9935", + "signature":"0x00128a39b715e87475666c3220fc0400bf34a84d24f77571d2b4e1e88b141d52305438156e526ff4fe96b7a13e707ab2f6f3ca00bd928dabc7f516b56cfe6fd61c", + "isHealthy":true, + "exhausted":false + }, + "received":"2021-01-05T14:00:48.549Z" + }); + + let actual_message: MessageResponse = + from_value(approve_state_message.clone()).expect("Should deserialize"); + let expected_message = MessageResponse { + from: "0x2892f6C41E0718eeeDd49D98D648C789668cA67d".parse().expect("Valid ValidatorId"), + received: Utc.ymd(2021, 1, 5).and_hms_milli(14,0,48, 549), + msg: Message::new(ApproveState { + state_root: "4739522efc1e81499541621759dadb331eaf08829d6a3851b4b654dfaddc9935".to_string(), + signature: "0x00128a39b715e87475666c3220fc0400bf34a84d24f77571d2b4e1e88b141d52305438156e526ff4fe96b7a13e707ab2f6f3ca00bd928dabc7f516b56cfe6fd61c".to_string(), + is_healthy: true, + exhausted: false, + }), + }; + + pretty_assertions::assert_eq!(expected_message, actual_message); + pretty_assertions::assert_eq!(to_value(expected_message).expect("should serialize"), approve_state_message); + + } + } } #[serde(tag = "type", rename_all = 
"SCREAMING_SNAKE_CASE")] From eb006a3bd42b7af63201fbc205d31abb52b05cca Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 19 Mar 2021 11:27:23 +0200 Subject: [PATCH 21/49] primivies - ValidatorId change errors to address::Error --- adapter/src/ethereum/error.rs | 2 +- primitives/src/lib.rs | 6 ++++++ primitives/src/sentry.rs | 6 ++++-- primitives/src/validator.rs | 12 +++++------- sentry/src/routes/validator_message.rs | 3 +-- 5 files changed, 17 insertions(+), 12 deletions(-) diff --git a/adapter/src/ethereum/error.rs b/adapter/src/ethereum/error.rs index 4026458c4..c4e4e5b30 100644 --- a/adapter/src/ethereum/error.rs +++ b/adapter/src/ethereum/error.rs @@ -84,7 +84,7 @@ pub enum KeystoreError { /// `address` key is missing from the keystore file AddressMissing, /// The `address` key in the keystore file is not a valid `ValidatorId` - AddressInvalid(primitives::DomainError), + AddressInvalid(primitives::address::Error), /// reading the keystore file failed ReadingFile(std::io::Error), /// Deserializing the keystore file failed diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 4e202f2ca..e85efef18 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -71,6 +71,12 @@ impl fmt::Display for DomainError { } } +impl From for DomainError { + fn from(error: address::Error) -> Self { + Self::InvalidArgument(error.to_string()) + } +} + impl error::Error for DomainError { fn cause(&self) -> Option<&dyn error::Error> { None diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index b2deb9835..62da64a76 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -101,8 +101,10 @@ pub mod message { }; pretty_assertions::assert_eq!(expected_message, actual_message); - pretty_assertions::assert_eq!(to_value(expected_message).expect("should serialize"), approve_state_message); - + pretty_assertions::assert_eq!( + to_value(expected_message).expect("should serialize"), + approve_state_message + ); } } } diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index dd0a4baaf..f17a7008a 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; use std::{convert::TryFrom, fmt, str::FromStr}; -use crate::{targeting::Value, Address, BigNum, DomainError, ToETHChecksum, ToHex}; +use crate::{address::Error, targeting::Value, Address, BigNum, DomainError, ToETHChecksum, ToHex}; pub use messages::*; @@ -58,7 +58,7 @@ impl AsRef<[u8]> for ValidatorId { } impl FromStr for ValidatorId { - type Err = crate::address::Error; + type Err = Error; fn from_str(s: &str) -> Result { Address::try_from(s).map(Self) @@ -66,16 +66,14 @@ impl FromStr for ValidatorId { } impl TryFrom<&str> for ValidatorId { - type Error = DomainError; + type Error = Error; fn try_from(value: &str) -> Result { - Address::try_from(value) - .map_err(|err| DomainError::InvalidArgument(err.to_string())) - .map(Self) + Address::try_from(value).map(Self) } } impl TryFrom<&String> for ValidatorId { - type Error = DomainError; + type Error = Error; fn try_from(value: &String) -> Result { Self::try_from(value.as_str()) diff --git a/sentry/src/routes/validator_message.rs b/sentry/src/routes/validator_message.rs index a171ac396..3c2fc375d 100644 --- a/sentry/src/routes/validator_message.rs +++ b/sentry/src/routes/validator_message.rs @@ -5,7 +5,6 @@ use primitives::adapter::Adapter; use primitives::sentry::ValidatorMessageResponse; use primitives::{Channel, DomainError, ValidatorId}; use serde::Deserialize; -use 
std::convert::TryFrom; #[derive(Deserialize)] pub struct ValidatorMessagesListQuery { @@ -28,7 +27,7 @@ pub fn extract_params(from_path: &str) -> Result<(Option, Vec Date: Mon, 22 Mar 2021 13:53:37 +0200 Subject: [PATCH 22/49] primitives - UnifiedNum - use u64 as internal value --- primitives/src/big_num.rs | 8 ++ primitives/src/unified_num.rs | 170 +++++++++++++++++++--------------- sentry/src/middleware/auth.rs | 2 +- 3 files changed, 104 insertions(+), 76 deletions(-) diff --git a/primitives/src/big_num.rs b/primitives/src/big_num.rs index c9e28bea5..82cdb8d78 100644 --- a/primitives/src/big_num.rs +++ b/primitives/src/big_num.rs @@ -10,6 +10,8 @@ use num::{pow::Pow, rational::Ratio, BigUint, CheckedSub, Integer}; use num_derive::{Num, NumOps, One, Zero}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use crate::UnifiedNum; + #[derive( Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, NumOps, One, Zero, Num, Default, )] @@ -268,6 +270,12 @@ impl From for BigNum { } } +impl<'a> Sum<&'a UnifiedNum> for BigNum { + fn sum>(iter: I) -> BigNum { + BigNum(iter.map(|unified| BigUint::from(unified.to_u64())).sum()) + } +} + fn biguint_from_str<'de, D>(deserializer: D) -> Result where D: Deserializer<'de>, diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index ef2797c0b..cc1a532cb 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -1,5 +1,6 @@ -use num::{pow::Pow, CheckedSub, Integer, One}; -use num_derive::{Num, NumOps, Zero}; +use num::{pow::Pow, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Integer, One}; +use num_derive::{FromPrimitive, Num, NumCast, NumOps, ToPrimitive, Zero}; +use num_traits::CheckedRem; use std::{ cmp::Ordering, fmt, @@ -10,8 +11,22 @@ use std::{ use crate::BigNum; /// Unified precision Number with precision 8 -#[derive(Num, NumOps, Zero, Default, PartialEq, Eq, PartialOrd, Ord)] -pub struct UnifiedNum(BigNum); +#[derive( + Clone, + Copy, + Num, + NumOps, + NumCast, + ToPrimitive, + FromPrimitive, + Zero, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, +)] +pub struct UnifiedNum(u64); impl UnifiedNum { pub const PRECISION: u8 = 8; @@ -20,41 +35,50 @@ impl UnifiedNum { Self(self.0.div_floor(&other.0)) } - pub fn to_f64(&self) -> Option { - self.0.to_f64() + pub fn to_u64(&self) -> u64 { + self.0 } - pub fn to_u64(&self) -> Option { - self.0.to_u64() + pub fn checked_add(&self, rhs: &UnifiedNum) -> Option { + CheckedAdd::checked_add(self, rhs) + } + + pub fn checked_sub(&self, rhs: &UnifiedNum) -> Option { + CheckedSub::checked_sub(self, rhs) + } + + pub fn checked_mul(&self, rhs: &UnifiedNum) -> Option { + CheckedMul::checked_mul(self, rhs) + } + + pub fn checked_div(&self, rhs: &UnifiedNum) -> Option { + CheckedDiv::checked_div(self, rhs) + } + + pub fn checked_rem(&self, rhs: &UnifiedNum) -> Option { + CheckedRem::checked_rem(self, rhs) } /// Transform the UnifiedNum precision 8 to a new precision pub fn to_precision(&self, precision: u8) -> BigNum { + let inner = BigNum::from(self.0); match precision.cmp(&Self::PRECISION) { - Ordering::Equal => self.0.clone(), - Ordering::Less => self - .0 - .div_floor(&BigNum::from(10).pow(Self::PRECISION - precision)), - Ordering::Greater => (&self.0).mul(&BigNum::from(10).pow(precision - Self::PRECISION)), + Ordering::Equal => inner, + Ordering::Less => inner.div_floor(&BigNum::from(10).pow(Self::PRECISION - precision)), + Ordering::Greater => inner.mul(&BigNum::from(10).pow(precision - Self::PRECISION)), } } } impl From for UnifiedNum { fn 
from(number: u64) -> Self { - Self(BigNum::from(number)) - } -} - -impl From for UnifiedNum { - fn from(number: BigNum) -> Self { Self(number) } } impl fmt::Display for UnifiedNum { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut string_value = self.0.to_str_radix(10); + let mut string_value = self.0.to_string(); let value_length = string_value.len(); let precision: usize = Self::PRECISION.into(); @@ -76,7 +100,7 @@ impl fmt::Debug for UnifiedNum { impl One for UnifiedNum { fn one() -> Self { - Self(BigNum::from(100_000_000)) + Self(100_000_000) } } @@ -120,44 +144,11 @@ impl Integer for UnifiedNum { } } -impl Pow for UnifiedNum { - type Output = UnifiedNum; - - fn pow(self, rhs: UnifiedNum) -> Self::Output { - Self(self.0.pow(rhs.0)) - } -} - -impl Pow<&UnifiedNum> for UnifiedNum { - type Output = UnifiedNum; - - fn pow(self, rhs: &UnifiedNum) -> Self::Output { - UnifiedNum(self.0.pow(&rhs.0)) - } -} - -impl Pow for &UnifiedNum { - type Output = UnifiedNum; - - fn pow(self, rhs: UnifiedNum) -> Self::Output { - UnifiedNum((&self.0).pow(rhs.0)) - } -} - -impl Pow<&UnifiedNum> for &UnifiedNum { - type Output = UnifiedNum; - - fn pow(self, rhs: &UnifiedNum) -> Self::Output { - UnifiedNum((&self.0).pow(&rhs.0)) - } -} - impl Add<&UnifiedNum> for &UnifiedNum { type Output = UnifiedNum; fn add(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 + &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 + rhs.0) } } @@ -171,8 +162,7 @@ impl Sub<&UnifiedNum> for &UnifiedNum { type Output = UnifiedNum; fn sub(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 - &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 - rhs.0) } } @@ -180,8 +170,7 @@ impl Sub<&UnifiedNum> for UnifiedNum { type Output = UnifiedNum; fn sub(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 - &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 - rhs.0) } } @@ -189,8 +178,7 @@ impl Sub for &UnifiedNum { type Output = UnifiedNum; fn sub(self, rhs: UnifiedNum) -> Self::Output { - let bignum = &self.0 - &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 - rhs.0) } } @@ -198,8 +186,7 @@ impl Div<&UnifiedNum> for &UnifiedNum { type Output = UnifiedNum; fn div(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 / &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 / rhs.0) } } @@ -207,8 +194,7 @@ impl Div<&UnifiedNum> for UnifiedNum { type Output = UnifiedNum; fn div(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 / &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 / rhs.0) } } @@ -216,8 +202,7 @@ impl Mul<&UnifiedNum> for &UnifiedNum { type Output = UnifiedNum; fn mul(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 * &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 * rhs.0) } } @@ -225,22 +210,44 @@ impl Mul<&UnifiedNum> for UnifiedNum { type Output = UnifiedNum; fn mul(self, rhs: &UnifiedNum) -> Self::Output { - let bignum = &self.0 * &rhs.0; - UnifiedNum(bignum) + UnifiedNum(self.0 * rhs.0) } } -impl<'a> Sum<&'a UnifiedNum> for UnifiedNum { - fn sum>(iter: I) -> Self { - let sum_uint = iter.map(|big_num| &big_num.0).sum(); +impl<'a> Sum<&'a UnifiedNum> for Option { + fn sum>(mut iter: I) -> Self { + iter.try_fold(0_u64, |acc, unified| acc.checked_add(unified.0)) + .map(UnifiedNum) + } +} - Self(sum_uint) +impl CheckedAdd for UnifiedNum { + fn checked_add(&self, v: &Self) -> Option { + self.0.checked_add(v.0).map(Self) } } impl CheckedSub for UnifiedNum { fn checked_sub(&self, v: &Self) -> Option { - self.0.checked_sub(&v.0).map(Self) + 
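+        // Delegates to `u64::checked_sub`, which returns `None` on underflow
+        // instead of wrapping or panicking.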
self.0.checked_sub(v.0).map(Self) + } +} + +impl CheckedMul for UnifiedNum { + fn checked_mul(&self, v: &Self) -> Option { + self.0.checked_mul(v.0).map(Self) + } +} + +impl CheckedDiv for UnifiedNum { + fn checked_div(&self, v: &Self) -> Option { + self.0.checked_div(v.0).map(Self) + } +} + +impl CheckedRem for UnifiedNum { + fn checked_rem(&self, v: &Self) -> Option { + self.0.checked_rem(v.0).map(Self) } } @@ -249,6 +256,19 @@ mod test { use super::*; use num::Zero; + #[test] + fn unified_num_sum() { + let num_max = UnifiedNum(u64::MAX); + let num_1 = UnifiedNum(1); + let num_5 = UnifiedNum(5); + + let succeeding_sum: Option = [num_1, num_5].iter().sum(); + let overflow_sum: Option = [num_1, num_max].iter().sum(); + + assert_eq!(Some(UnifiedNum(6)), succeeding_sum); + assert_eq!(None, overflow_sum); + } + #[test] fn unified_num_displays_correctly() { let one = UnifiedNum::from(100_000_000); @@ -289,7 +309,7 @@ mod test { // 321.00000999 let same_unified = UnifiedNum::from(32_100_000_777_u64); assert_eq!( - same_unified.0, + BigNum::from(same_unified.0), same_unified.to_precision(same_precision), "It should not make any adjustments to the precision" ); diff --git a/sentry/src/middleware/auth.rs b/sentry/src/middleware/auth.rs index 82c9b6548..ce404e98a 100644 --- a/sentry/src/middleware/auth.rs +++ b/sentry/src/middleware/auth.rs @@ -133,7 +133,7 @@ mod test { use primitives::util::tests::prep_db::{AUTH, IDS}; - use primitives::{config::configuration}; + use primitives::config::configuration; use deadpool::managed::Object; From a38e0f22503b5cab05c44dc42d7b71d8ef3a5ab1 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 23 Mar 2021 13:17:57 +0200 Subject: [PATCH 23/49] primitives - UnifiedNum - Update doc --- primitives/src/unified_num.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index cc1a532cb..4c0d22343 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -10,7 +10,10 @@ use std::{ use crate::BigNum; -/// Unified precision Number with precision 8 +/// Unified Number with a precision of 8 digits after the decimal point +/// The number can be a maximum of `u64::MAX` (the underlying type), +/// or in a `UnifiedNum` value `184_467_440_737.09551615` +/// The actual number is handled as a unsigned number and only the display shows the decimal point #[derive( Clone, Copy, From 590352789b3284ee029b86821df116fb8decd3ee Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 23 Mar 2021 13:44:55 +0200 Subject: [PATCH 24/49] primitives - UnifiedNum - fix test comment --- primitives/src/unified_num.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index 4c0d22343..11c13dd70 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -309,7 +309,7 @@ mod test { "It should floor the result of USDT" ); - // 321.00000999 + // 321.00000777 let same_unified = UnifiedNum::from(32_100_000_777_u64); assert_eq!( BigNum::from(same_unified.0), From b6ec00c8df179dce778c2fc53edab5d5404504d6 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 23 Mar 2021 18:08:26 +0200 Subject: [PATCH 25/49] add todos for #381 --- sentry/src/routes/channel.rs | 5 +++++ validator_worker/src/core/events.rs | 4 ++++ validator_worker/src/producer.rs | 4 ++++ 3 files changed, 13 insertions(+) diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index e5178d5dd..0c426d928 100644 --- 
a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -201,6 +201,11 @@ pub async fn insert_events( .remove("events") .ok_or_else(|| ResponseError::BadRequest("invalid request".to_string()))?; + // + // TODO #381: AIP#61 Spender Aggregator should be called + // + + app.event_aggregator .record(app, &channel_id, session, auth, events) .await?; diff --git a/validator_worker/src/core/events.rs b/validator_worker/src/core/events.rs index a12896fb0..d47b81f74 100644 --- a/validator_worker/src/core/events.rs +++ b/validator_worker/src/core/events.rs @@ -6,6 +6,10 @@ use primitives::{BalancesMap, BigNum, Channel, DomainError}; use crate::core::fees::get_balances_after_fees_tree; + +// +// TODO #381: AIP#61 Remove the fees and use the new Spender Aggregates +// pub(crate) fn merge_aggrs( accounting: &Accounting, aggregates: &[EventAggregate], diff --git a/validator_worker/src/producer.rs b/validator_worker/src/producer.rs index a562858aa..81338ca83 100644 --- a/validator_worker/src/producer.rs +++ b/validator_worker/src/producer.rs @@ -35,6 +35,10 @@ pub async fn tick( balances: Default::default(), }, }; + + // + // TODO #381: AIP#61 Merge all Spender Aggregates and create a new Accounting + // let aggrs = iface .get_event_aggregates(accounting.last_event_aggregate) From b5c4e731d0b780dbcc2d8c8a0798a2c142186ea7 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 23 Mar 2021 18:10:21 +0200 Subject: [PATCH 26/49] todo to change BalancesMap's ValidatorId to Address --- primitives/src/balances_map.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/primitives/src/balances_map.rs b/primitives/src/balances_map.rs index aa11ef6a1..876eec872 100644 --- a/primitives/src/balances_map.rs +++ b/primitives/src/balances_map.rs @@ -1,14 +1,12 @@ -use std::collections::BTreeMap; +use std::{collections::{BTreeMap, btree_map::{Entry, IntoIter, Iter, Values}}, iter::FromIterator, ops::Index}; use crate::{BigNum, ValidatorId}; -use std::collections::btree_map::{Entry, IntoIter, Iter, Values}; use serde::{Deserialize, Serialize}; -use std::iter::FromIterator; -use std::ops::Index; #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] #[serde(transparent)] +// TODO: AIP#61 Change the `ValidatorId` to `Address` pub struct BalancesMap(BTreeMap); impl Index<&'_ ValidatorId> for BalancesMap { From c152159afafc6a5944161b7267bea01b0b537d3f Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 23 Mar 2021 18:11:21 +0200 Subject: [PATCH 27/49] primitives - UnifiedNum - (De)Serialize & docs clarification --- primitives/src/unified_num.rs | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index 11c13dd70..da4bd6c62 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -1,19 +1,22 @@ -use num::{pow::Pow, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Integer, One}; +use num::{traits::CheckedRem, pow::Pow, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Integer, One}; use num_derive::{FromPrimitive, Num, NumCast, NumOps, ToPrimitive, Zero}; -use num_traits::CheckedRem; use std::{ cmp::Ordering, fmt, iter::Sum, ops::{Add, AddAssign, Div, Mul, Sub}, }; - +use serde::{Serialize, Deserialize}; use crate::BigNum; -/// Unified Number with a precision of 8 digits after the decimal point +/// Unified Number with a precision of 8 digits after the decimal point. 
+/// /// The number can be a maximum of `u64::MAX` (the underlying type), -/// or in a `UnifiedNum` value `184_467_440_737.09551615` -/// The actual number is handled as a unsigned number and only the display shows the decimal point +/// or in a `UnifiedNum` value `184_467_440_737.09551615`. +/// The actual number is handled as a unsigned number and only the display shows the decimal point. +/// +/// This number is (de)serialized as a Javascript number which is `f64`. +/// As far as the numbers don't exceed `2**63`, the Javascript number should be sufficient without losing precision #[derive( Clone, Copy, @@ -28,6 +31,8 @@ use crate::BigNum; Eq, PartialOrd, Ord, + Serialize, + Deserialize )] pub struct UnifiedNum(u64); From a853860c6207cbcb7f83b193652e68ea92cfe1d5 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 24 Mar 2021 09:20:25 +0200 Subject: [PATCH 28/49] primitives - UnifiedNum - fix (de)serialization --- primitives/src/unified_num.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index da4bd6c62..38ed974fd 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -34,6 +34,7 @@ use crate::BigNum; Serialize, Deserialize )] +#[serde(transparent)] pub struct UnifiedNum(u64); impl UnifiedNum { @@ -278,7 +279,7 @@ mod test { } #[test] - fn unified_num_displays_correctly() { + fn unified_num_displays_and_de_serializes_correctly() { let one = UnifiedNum::from(100_000_000); let zero_point_one = UnifiedNum::from(10_000_000); let smallest_value = UnifiedNum::from(1); @@ -290,6 +291,8 @@ mod test { assert_eq!("0.10000000", &zero_point_one.to_string()); assert_eq!("0.00000001", &smallest_value.to_string()); assert_eq!("1449030.00567000", &random_value.to_string()); + + assert_eq!(serde_json::Value::Number(100_000_000.into()), serde_json::to_value(one).expect("Should serialize")) } #[test] From 16d2369448da3d7016b8ab2944c968dbd6aae73a Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 24 Mar 2021 18:13:57 +0200 Subject: [PATCH 29/49] primitives - Cargo - add `uuid` for CampaignId --- Cargo.lock | 10 ++++++++++ primitives/Cargo.toml | 1 + 2 files changed, 11 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 046e3c7e6..afbb88241 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2885,6 +2885,7 @@ dependencies = [ "tokio-postgres 0.7.0", "toml", "url", + "uuid", ] [[package]] @@ -4441,6 +4442,15 @@ dependencies = [ "serde", ] +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom 0.2.1", +] + [[package]] name = "validator_worker" version = "0.2.0" diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 7fa47099e..1ad9dc917 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -25,6 +25,7 @@ slog-async = "^2.3.0" thiserror = "^1.0" chrono = { version = "0.4", features = ["serde"] } time = "0.1.42" +uuid = { version = "0.8", features = ["v4"] } # For encoding the Channel to a ChannelId ethabi = "13.0.0" # For the nonce U256 From f605639532b9bdf03a313cf36e2fe7b3353dcf0b Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 25 Mar 2021 16:16:19 +0200 Subject: [PATCH 30/49] primitives - Address - impl more traits and helper methods --- primitives/src/address.rs | 65 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 64 insertions(+), 1 deletion(-) diff --git 
a/primitives/src/address.rs b/primitives/src/address.rs index dba0c1fe3..8c31a562a 100644 --- a/primitives/src/address.rs +++ b/primitives/src/address.rs @@ -1,6 +1,6 @@ use hex::{FromHex, FromHexError}; use serde::{Deserialize, Serialize, Serializer}; -use std::{convert::TryFrom, fmt}; +use std::{convert::TryFrom, fmt, str::FromStr}; use thiserror::Error; use crate::{targeting::Value, DomainError, ToETHChecksum, ToHex}; @@ -29,6 +29,10 @@ impl Address { pub fn as_bytes(&self) -> &[u8; 20] { &self.0 } + + pub fn from_bytes(bytes: &[u8; 20]) -> Self { + Self(*bytes) + } } impl Serialize for Address { @@ -67,6 +71,14 @@ impl AsRef<[u8]> for Address { } } +impl FromStr for Address { + type Err = Error; + + fn from_str(s: &str) -> Result { + Ok(Self(from_bytes(s, Prefix::Insensitive)?)) + } +} + impl TryFrom<&str> for Address { type Error = Error; @@ -83,6 +95,14 @@ impl TryFrom<&String> for Address { } } +impl TryFrom<&[u8]> for Address { + type Error = Error; + + fn try_from(slice: &[u8]) -> Result { + Ok(Self(from_bytes(slice, Prefix::Insensitive)?)) + } +} + impl TryFrom for Address { type Error = DomainError; @@ -135,3 +155,46 @@ pub fn from_bytes>(from: T, prefix: Prefix) -> Result<[u8; 20], E _ => Err(Error::Length), } } + +#[cfg(feature = "postgres")] +pub mod postgres { + use super::Address; + use crate::ToETHChecksum; + use bytes::BytesMut; + use postgres_types::{FromSql, IsNull, ToSql, Type}; + use std::error::Error; + + impl<'a> FromSql<'a> for Address { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let str_slice = <&str as FromSql>::from_sql(ty, raw)?; + + Ok(str_slice.parse()?) + } + + fn accepts(ty: &Type) -> bool { + matches!(*ty, Type::TEXT | Type::VARCHAR) + } + } + + impl ToSql for Address { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + self.to_checksum().to_sql(ty, w) + } + + fn accepts(ty: &Type) -> bool { + ::accepts(ty) + } + + fn to_sql_checked( + &self, + ty: &Type, + out: &mut BytesMut, + ) -> Result> { + self.to_checksum().to_sql_checked(ty, out) + } + } +} From 33af087d1f829cc385f8a218b60308d264b1d45d Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 25 Mar 2021 16:19:56 +0200 Subject: [PATCH 31/49] primitives - campaign - CampaignId --- primitives/src/campaign.rs | 194 +++++++++++++++++++++++++++++++++---- 1 file changed, 173 insertions(+), 21 deletions(-) diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 94c4b8630..75ed6c9fc 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -1,5 +1,6 @@ use crate::{ - channel_v5::Channel, targeting::Rules, AdUnit, Address, EventSubmission, ValidatorDesc, + channel_v5::Channel, targeting::Rules, AdUnit, Address, EventSubmission, UnifiedNum, + ValidatorDesc, }; use chrono::{ @@ -9,15 +10,156 @@ use chrono::{ use serde::{Deserialize, Serialize}; use serde_with::with_prefix; -pub use pricing::{Pricing, PricingBounds}; -pub use validators::{ValidatorRole, Validators}; +pub use { + campaign_id::CampaignId, + pricing::{Pricing, PricingBounds}, + validators::{ValidatorRole, Validators}, +}; with_prefix!(prefix_active "active_"); -#[derive(Debug, Serialize, Deserialize)] +mod campaign_id { + use crate::ToHex; + use hex::{FromHex, FromHexError}; + use serde::{ + de::{self, Visitor}, + Deserialize, Deserializer, Serialize, Serializer, + }; + use std::{fmt, str::FromStr}; + use thiserror::Error; + use uuid::Uuid; + + #[derive(Debug, Clone, Copy, PartialEq, Eq)] + /// an Id of 16 bytes, (de)serialized as a `0x` prefixed hex + /// In this 
implementation of the `CampaignId` the value is generated from a `Uuid::new_v4().to_simple()` + pub struct CampaignId([u8; 16]); + + impl CampaignId { + pub fn new() -> Self { + Self::default() + } + + pub fn as_bytes(&self) -> &[u8; 16] { + &self.0 + } + + pub fn from_bytes(bytes: &[u8; 16]) -> Self { + Self(*bytes) + } + } + + impl Default for CampaignId { + fn default() -> Self { + Self(*Uuid::new_v4().as_bytes()) + } + } + + impl AsRef<[u8]> for CampaignId { + fn as_ref(&self) -> &[u8] { + &self.0 + } + } + + impl AsRef<[u8; 16]> for CampaignId { + fn as_ref(&self) -> &[u8; 16] { + &self.0 + } + } + + #[derive(Debug, Error)] + pub enum Error { + /// the `0x` prefix is missing + #[error("Expected a `0x` prefix")] + ExpectedPrefix, + #[error(transparent)] + InvalidHex(#[from] FromHexError), + } + + impl FromStr for CampaignId { + type Err = Error; + + fn from_str(s: &str) -> Result { + match s.strip_prefix("0x") { + Some(hex) => Ok(Self(<[u8; 16]>::from_hex(hex)?)), + None => Err(Error::ExpectedPrefix), + } + } + } + + impl fmt::Display for CampaignId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0.to_hex_prefixed()) + } + } + + impl Serialize for CampaignId { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.0.to_hex_prefixed()) + } + } + + impl<'de> Deserialize<'de> for CampaignId { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_str(StringIdVisitor) + } + } + + struct StringIdVisitor; + + impl<'de> Visitor<'de> for StringIdVisitor { + type Value = CampaignId; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("a string of a `0x` prefixed hex with 16 bytes") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + value + .parse::() + .map_err(|err| E::custom(err.to_string())) + } + + fn visit_string(self, value: String) -> Result + where + E: de::Error, + { + self.visit_str(&value) + } + } + + #[cfg(test)] + mod test { + use serde_json::{to_value, Value}; + + use super::*; + + #[test] + fn de_serializes_campaign_id() { + let id = CampaignId::new(); + + assert_eq!( + Value::String(id.0.to_hex_prefixed()), + to_value(id).expect("Should serialize") + ); + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct Campaign { + pub id: CampaignId, pub channel: Channel, pub creator: Address, + pub budget: UnifiedNum, pub validators: Validators, #[serde(default, skip_serializing_if = "Option::is_none")] pub title: Option, @@ -41,22 +183,6 @@ pub struct Campaign { pub active: Active, } -#[derive(Debug, Serialize, Deserialize)] -pub struct Active { - #[serde( - default, - skip_serializing_if = "Option::is_none", - with = "ts_milliseconds_option" - )] - pub from: Option>, - /// A millisecond timestamp of when the campaign should enter a withdraw period - /// (no longer accept any events other than CHANNEL_CLOSE) - /// A sane value should be lower than channel.validUntil * 1000 and higher than created - /// It's recommended to set this at least one month prior to channel.validUntil * 1000 - #[serde(with = "ts_milliseconds")] - pub to: DateTime, -} - impl Campaign { /// Matches the Channel.leader to the Campaign.spec.leader /// If they match it returns `Some`, otherwise, it returns `None` @@ -77,6 +203,32 @@ impl Campaign { None } } + + /// Returns the pricing of a given event + pub fn pricing(&self, event: &str) -> Option<&Pricing> { + 
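+        // Looks up the bounds configured for this event type; returns `None` when the
+        // campaign has no `pricing_bounds` at all or no entry for the given event.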
self.pricing_bounds + .as_ref() + .and_then(|bound| bound.get(event)) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct Active { + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "ts_milliseconds_option" + )] + pub from: Option>, + // + // TODO: AIP#61 Update docs + // + /// A millisecond timestamp of when the campaign should enter a withdraw period + /// (no longer accept any events other than CHANNEL_CLOSE) + /// A sane value should be lower than channel.validUntil * 1000 and higher than created + /// It's recommended to set this at least one month prior to channel.validUntil * 1000 + #[serde(with = "ts_milliseconds")] + pub to: DateTime, } mod pricing { @@ -85,8 +237,8 @@ mod pricing { #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] pub struct Pricing { - pub max: BigNum, pub min: BigNum, + pub max: BigNum, } #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] From 231c52f1cda0dd42568a9e9ba9d5cc5da34211ed Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 25 Mar 2021 16:20:18 +0200 Subject: [PATCH 32/49] primitives - test - prep_db - Campaign & Addresses --- primitives/src/util/tests/prep_db.rs | 63 ++++++++++++++++++++++++++-- 1 file changed, 59 insertions(+), 4 deletions(-) diff --git a/primitives/src/util/tests/prep_db.rs b/primitives/src/util/tests/prep_db.rs index 05b857809..c827f65a7 100644 --- a/primitives/src/util/tests/prep_db.rs +++ b/primitives/src/util/tests/prep_db.rs @@ -1,15 +1,16 @@ use crate::{ + campaign::{self, Active, Validators}, channel::{Pricing, PricingBounds}, + channel_v5::{self, Nonce}, targeting::Rules, - AdUnit, BigNum, Channel, ChannelId, ChannelSpec, EventSubmission, SpecValidators, - ValidatorDesc, ValidatorId, IPFS, + AdUnit, Address, BigNum, Campaign, Channel, ChannelId, ChannelSpec, EventSubmission, + SpecValidators, UnifiedNum, ValidatorDesc, ValidatorId, IPFS, }; use chrono::{TimeZone, Utc}; use fake::faker::{Faker, Number}; use hex::FromHex; use lazy_static::lazy_static; -use std::collections::HashMap; -use std::convert::TryFrom; +use std::{collections::HashMap, convert::TryFrom}; lazy_static! { // dummy auth @@ -27,6 +28,33 @@ lazy_static! { ids }; + + pub static ref ADDRESSES: HashMap = { + let mut addresses = HashMap::new(); + + addresses.insert("leader".into(), Address::try_from("0xce07CbB7e054514D590a0262C93070D838bFBA2e").expect("failed to parse id")); + addresses.insert("follower".into(), Address::try_from("0xc91763d7f14ac5c5ddfbcd012e0d2a61ab9bded3").expect("failed to parse id")); + addresses.insert("user".into(), Address::try_from("0x20754168c00a6e58116ccfd0a5f7d1bb66c5de9d").expect("failed to parse id")); + addresses.insert("publisher".into(), Address::try_from("0xb7d3f81e857692d13e9d63b232a90f4a1793189e").expect("failed to parse id")); + addresses.insert("publisher2".into(), Address::try_from("0x2054b0c1339309597ad04ba47f4590f8cdb4e305").expect("failed to parse id")); + addresses.insert("creator".into(), Address::try_from("0x033ed90e0fec3f3ea1c9b005c724d704501e0196").expect("failed to parse id")); + addresses.insert("tester".into(), Address::try_from("0x2892f6C41E0718eeeDd49D98D648C789668cA67d").expect("failed to parse id")); + // These are the real Addresses of these stablecoins, however, they are only used for testing! 
+        addresses.insert("DAI".into(), Address::try_from("0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359").expect("failed to parse id"));
+        addresses.insert("USDT".into(), Address::try_from("0xdac17f958d2ee523a2206206994597c13d831ec7").expect("failed to parse id"));
+        addresses.insert("USDC".into(), Address::try_from("0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48").expect("failed to parse id"));
+
+        addresses
+    };
+
+    pub static ref TOKENS: HashMap<String, Address> = {
+        let mut tokens = HashMap::new();
+
+        tokens.insert("DAI".into(), "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359".parse::<Address>
().expect("Should parse")); + + tokens + }; + // dummy auth tokens // authorization tokens pub static ref AUTH: HashMap = { @@ -57,6 +85,33 @@ lazy_static! { fee_addr: None, }; + pub static ref DUMMY_CAMPAIGN: Campaign = { + Campaign { + id: "0x936da01f9abd4d9d80c702af85c822a8".parse().expect("Should parse"), + channel: channel_v5::Channel { + leader: IDS["leader"], + follower: IDS["follower"], + guardian: IDS["tester"].to_address(), + token: TOKENS["DAI"], + nonce: Nonce::from(987_654_321_u32), + }, + creator: IDS["creator"].to_address(), + // 1000.00000000 + budget: UnifiedNum::from(100_000_000_000), + validators: Validators::new(DUMMY_VALIDATOR_LEADER.clone(), DUMMY_VALIDATOR_FOLLOWER.clone()), + title: Some("Dummy Campaign".to_string()), + pricing_bounds: Some(campaign::PricingBounds {impression: Some(campaign::Pricing { min: 1.into(), max: 10.into()}), click: Some(campaign::Pricing { min: 0.into(), max: 0.into()})}), + event_submission: Some(EventSubmission { allow: vec![] }), + ad_units: vec![], + targeting_rules: Rules::new(), + created: Utc.ymd(2021, 2, 1).and_hms(7,0,0), + active: Active { + to: Utc.ymd(2099, 1, 30).and_hms(0,0,0), + from: None, + }, + } + }; + pub static ref DUMMY_CHANNEL: Channel = { let nonce = BigNum::from(::between(100_000_000, 999_999_999)); From b9b2ba0c799c46ad8f25f035f2844aadbb42f386 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 25 Mar 2021 16:20:50 +0200 Subject: [PATCH 33/49] primitives - channel_v5 - Channel - derive Eq --- primitives/src/channel_v5.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/src/channel_v5.rs b/primitives/src/channel_v5.rs index 9c6ba6259..494833834 100644 --- a/primitives/src/channel_v5.rs +++ b/primitives/src/channel_v5.rs @@ -4,7 +4,7 @@ use std::fmt; use crate::{Address, ChannelId, ValidatorId}; -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct Channel { pub leader: ValidatorId, From 27ffbbf196ac1b4c4bd6354d9528a3d9a0895449 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 25 Mar 2021 16:22:35 +0200 Subject: [PATCH 34/49] Prepare for AIP#61 changes with TODOs and all-around changes --- adapter/src/lib.rs | 22 +- primitives/src/balances_map.rs | 121 ++++--- primitives/src/chain.rs | 70 ++++ primitives/src/channel.rs | 8 +- primitives/src/channel_validator.rs | 7 +- primitives/src/lib.rs | 2 + primitives/src/sentry.rs | 84 ++++- primitives/src/targeting.rs | 38 +-- primitives/src/targeting/eval.rs | 40 ++- primitives/src/targeting/eval_test.rs | 24 +- primitives/src/targeting/input.rs | 140 ++++---- primitives/src/targeting/input/field.rs | 1 + primitives/src/unified_num.rs | 19 +- primitives/src/validator.rs | 24 +- sentry/src/access.rs | 8 +- sentry/src/analytics_recorder.rs | 8 +- sentry/src/db/event_aggregate.rs | 4 +- sentry/src/event_aggregator.rs | 44 +-- sentry/src/event_reducer.rs | 28 +- sentry/src/payout.rs | 104 +++--- sentry/src/routes/channel.rs | 1 - validator_worker/src/core/events.rs | 126 +++----- validator_worker/src/core/fees.rs | 338 -------------------- validator_worker/src/core/follower_rules.rs | 82 ++--- validator_worker/src/lib.rs | 9 +- validator_worker/src/producer.rs | 10 +- 26 files changed, 607 insertions(+), 755 deletions(-) create mode 100644 primitives/src/chain.rs delete mode 100644 validator_worker/src/core/fees.rs diff --git a/adapter/src/lib.rs b/adapter/src/lib.rs index 55fd58f6b..e13b74cc8 100644 --- a/adapter/src/lib.rs 
+++ b/adapter/src/lib.rs @@ -6,13 +6,13 @@ use std::error::Error; use chrono::{DateTime, Utc}; use hex::FromHex; -use primitives::{channel::ChannelError, BigNum, Channel, ValidatorId}; +use primitives::{channel::ChannelError, Address, BigNum, Channel, ValidatorId}; use sha2::{Digest, Sha256}; use std::convert::TryFrom; use tiny_keccak::Keccak; use web3::{ ethabi::{encode, token::Token}, - types::{Address, U256}, + types::{Address as EthAddress, U256}, }; pub use self::dummy::DummyAdapter; @@ -46,9 +46,9 @@ pub fn get_signable_state_root( Ok(res) } -pub fn get_balance_leaf(acc: &ValidatorId, amnt: &BigNum) -> Result<[u8; 32], Box> { +pub fn get_balance_leaf(acc: &Address, amnt: &BigNum) -> Result<[u8; 32], Box> { let tokens = [ - Token::Address(Address::from_slice(acc.inner())), + Token::Address(EthAddress::from_slice(acc.as_bytes())), Token::Uint( U256::from_dec_str(&amnt.to_str_radix(10)) .map_err(|_| ChannelError::InvalidArgument("failed to parse amt".into()))?, @@ -67,11 +67,11 @@ pub fn get_balance_leaf(acc: &ValidatorId, amnt: &BigNum) -> Result<[u8; 32], Bo // OnChain channel Representation pub struct EthereumChannel { - pub creator: Address, - pub token_addr: Address, + pub creator: EthAddress, + pub token_addr: EthAddress, pub token_amount: U256, pub valid_until: U256, - pub validators: Vec
, + pub validators: Vec, pub spec: [u8; 32], } @@ -121,8 +121,8 @@ impl EthereumChannel { return Err(ChannelError::InvalidArgument("invalid token amount".into())); } - let creator = Address::from_slice(creator); - let token_addr = Address::from_slice(token_addr); + let creator = EthAddress::from_slice(creator); + let token_addr = EthAddress::from_slice(token_addr); let token_amount = U256::from_dec_str(&token_amount) .map_err(|_| ChannelError::InvalidArgument("failed to parse token amount".into()))?; let valid_until = U256::from_dec_str(&valid_until.timestamp().to_string()) @@ -130,7 +130,7 @@ impl EthereumChannel { let validators = validators .iter() - .map(|v| Address::from_slice(v.inner())) + .map(|v| EthAddress::from_slice(v.as_bytes())) .collect(); Ok(Self { @@ -145,7 +145,7 @@ impl EthereumChannel { pub fn hash(&self, contract_addr: &[u8; 20]) -> [u8; 32] { let tokens = [ - Token::Address(Address::from_slice(contract_addr)), + Token::Address(EthAddress::from_slice(contract_addr)), Token::Address(self.creator.to_owned()), Token::Address(self.token_addr.to_owned()), Token::Uint(self.token_amount.to_owned()), diff --git a/primitives/src/balances_map.rs b/primitives/src/balances_map.rs index 876eec872..adce6b561 100644 --- a/primitives/src/balances_map.rs +++ b/primitives/src/balances_map.rs @@ -1,44 +1,58 @@ -use std::{collections::{BTreeMap, btree_map::{Entry, IntoIter, Iter, Values}}, iter::FromIterator, ops::Index}; +use serde::{Deserialize, Serialize}; +use std::{ + collections::{ + btree_map::{Entry, IntoIter, Iter, Values}, + BTreeMap, + }, + iter::FromIterator, + ops::Index, +}; -use crate::{BigNum, ValidatorId}; +use crate::{Address, BigNum, UnifiedNum}; -use serde::{Deserialize, Serialize}; +pub type UnifiedMap = Map; +pub type BalancesMap = Map; -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[serde(transparent)] -// TODO: AIP#61 Change the `ValidatorId` to `Address` -pub struct BalancesMap(BTreeMap); +pub struct Map(BTreeMap); + +impl Default for Map { + fn default() -> Self { + Map(BTreeMap::default()) + } +} -impl Index<&'_ ValidatorId> for BalancesMap { - type Output = BigNum; +impl Index<&'_ K> for Map { + type Output = V; - fn index(&self, index: &ValidatorId) -> &Self::Output { + fn index(&self, index: &K) -> &Self::Output { self.0.index(index) } } -impl BalancesMap { - pub fn iter(&self) -> Iter<'_, ValidatorId, BigNum> { +impl Map { + pub fn iter(&self) -> Iter<'_, K, V> { self.0.iter() } - pub fn values(&self) -> Values<'_, ValidatorId, BigNum> { + pub fn values(&self) -> Values<'_, K, V> { self.0.values() } - pub fn get(&self, key: &ValidatorId) -> Option<&BigNum> { + pub fn get(&self, key: &K) -> Option<&V> { self.0.get(key) } - pub fn contains_key(&self, key: &ValidatorId) -> bool { + pub fn contains_key(&self, key: &K) -> bool { self.0.contains_key(key) } - pub fn entry(&mut self, key: ValidatorId) -> Entry<'_, ValidatorId, BigNum> { + pub fn entry(&mut self, key: K) -> Entry<'_, K, V> { self.0.entry(key) } - pub fn insert(&mut self, key: ValidatorId, value: BigNum) -> Option { + pub fn insert(&mut self, key: K, value: V) -> Option { self.0.insert(key, value) } @@ -51,18 +65,18 @@ impl BalancesMap { } } -impl FromIterator<(ValidatorId, BigNum)> for BalancesMap { - fn from_iter>(iter: I) -> Self { +impl FromIterator<(K, V)> for Map { + fn from_iter>(iter: I) -> Self { // @TODO: Is there better way to do this? 
- let btree_map: BTreeMap = iter.into_iter().collect(); + let btree_map: BTreeMap = iter.into_iter().collect(); - BalancesMap(btree_map) + Map(btree_map) } } -impl IntoIterator for BalancesMap { - type Item = (ValidatorId, BigNum); - type IntoIter = IntoIter; +impl IntoIterator for Map { + type Item = (K, V); + type IntoIter = IntoIter; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() @@ -71,26 +85,53 @@ impl IntoIterator for BalancesMap { #[cfg(test)] mod test { + use serde_json::json; + use super::*; - use crate::util::tests::prep_db::IDS; - use crate::BigNum; + use crate::util::tests::prep_db::ADDRESSES; #[test] - fn test_balances_map_serialization() { - let data = vec![ - (IDS["leader"].clone(), BigNum::from(50_u64)), - (IDS["follower"].clone(), BigNum::from(100_u64)), - ]; + fn test_unified_map_de_serialization() { + let unified_map: UnifiedMap = vec![ + (ADDRESSES["leader"].clone(), UnifiedNum::from(50_u64)), + (ADDRESSES["follower"].clone(), UnifiedNum::from(100_u64)), + ] + .into_iter() + .collect(); + + let actual_json = serde_json::to_value(&unified_map).expect("Should serialize it"); + let expected_json = json!({ + "0xC91763D7F14ac5c5dDfBCD012e0D2A61ab9bDED3":100, + "0xce07CbB7e054514D590a0262C93070D838bFBA2e":50 + }); - let balances_map: BalancesMap = data.into_iter().collect(); + assert_eq!(expected_json, actual_json); + + let balances_map_from_json: UnifiedMap = + serde_json::from_value(actual_json).expect("Should deserialize it"); + + assert_eq!(unified_map, balances_map_from_json); + } + + #[test] + fn test_balances_map_de_serialization() { + let balances_map: BalancesMap = vec![ + (ADDRESSES["leader"].clone(), BigNum::from(50_u64)), + (ADDRESSES["follower"].clone(), BigNum::from(100_u64)), + ] + .into_iter() + .collect(); - let actual_json = serde_json::to_string(&balances_map).expect("Should serialize it"); - let expected_json = r#"{"0xC91763D7F14ac5c5dDfBCD012e0D2A61ab9bDED3":"100","0xce07CbB7e054514D590a0262C93070D838bFBA2e":"50"}"#; + let actual_json = serde_json::to_value(&balances_map).expect("Should serialize it"); + let expected_json = json!({ + "0xC91763D7F14ac5c5dDfBCD012e0D2A61ab9bDED3":"100", + "0xce07CbB7e054514D590a0262C93070D838bFBA2e":"50" + }); assert_eq!(expected_json, actual_json); let balances_map_from_json: BalancesMap = - serde_json::from_str(&actual_json).expect("Should deserialize it"); + serde_json::from_value(actual_json).expect("Should deserialize it"); assert_eq!(balances_map, balances_map_from_json); } @@ -98,15 +139,19 @@ mod test { #[test] fn test_balances_map_deserialization_with_same_keys() { // the first is ETH Checksummed, the second is lowercase! 
- let json = r#"{"0xC91763D7F14ac5c5dDfBCD012e0D2A61ab9bDED3":"100","0xc91763d7f14ac5c5ddfbcd012e0d2a61ab9bded3":"20","0xce07CbB7e054514D590a0262C93070D838bFBA2e":"50"}"#; + let json = json!({ + "0xC91763D7F14ac5c5dDfBCD012e0D2A61ab9bDED3":"100", + "0xc91763d7f14ac5c5ddfbcd012e0d2a61ab9bded3":"20", + "0xce07CbB7e054514D590a0262C93070D838bFBA2e":"50" + }); let actual_deserialized: BalancesMap = - serde_json::from_str(&json).expect("Should deserialize it"); + serde_json::from_value(json).expect("Should deserialize it"); let expected_deserialized: BalancesMap = vec![ - (IDS["leader"].clone(), BigNum::from(50_u64)), + (ADDRESSES["leader"].clone(), BigNum::from(50_u64)), // only the second should be accepted, as it appears second in the string and it's the latest one - (IDS["follower"].clone(), BigNum::from(20_u64)), + (ADDRESSES["follower"].clone(), BigNum::from(20_u64)), ] .into_iter() .collect(); diff --git a/primitives/src/chain.rs b/primitives/src/chain.rs new file mode 100644 index 000000000..8baba61fe --- /dev/null +++ b/primitives/src/chain.rs @@ -0,0 +1,70 @@ +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use std::{collections::HashMap, fmt}; + +lazy_static! { + pub static ref CHAINS: HashMap = { + let mut map = HashMap::new(); + map.insert( + ChainId(1), + Chain { + chain_id: ChainId(1), + name: "Ethereum Mainnet", + short: "eth", + network: "mainnet", + }, + ); + + map.insert( + ChainId(5), + Chain { + chain_id: ChainId(5), + name: "Ethereum Testnet Görli", + short: "gor", + network: "goerli", + }, + ); + + map.insert( + ChainId(100), + Chain { + chain_id: ChainId(100), + name: "xDAI Chain", + short: "xdai", + network: "mainnet", + }, + ); + + map + }; +} + +/// Ethereum Virtual Machine Chain +/// see https://chainid.network +pub struct Chain { + pub chain_id: ChainId, + pub name: &'static str, + pub short: &'static str, + pub network: &'static str, +} + +#[derive(Serialize, Deserialize, Hash, Clone, Copy, Eq, PartialEq)] +#[serde(transparent)] +pub struct ChainId(u32); + +impl ChainId { + pub fn new(id: u32) -> Self { + Self(id) + } +} + +/// Default ChainId: 1 - Ethereum Mainnet +pub fn eth_mainnet() -> ChainId { + ChainId(1) +} + +impl fmt::Debug for ChainId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ChainId({})", self.0) + } +} diff --git a/primitives/src/channel.rs b/primitives/src/channel.rs index eb883085d..e991fbfd6 100644 --- a/primitives/src/channel.rs +++ b/primitives/src/channel.rs @@ -107,7 +107,13 @@ impl Channel { self.exhausted.len() == 2 && self.exhausted.iter().all(|&x| x) } } - +// +// +// +// TODO REMOVE +// +// +// #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] pub struct Pricing { pub max: BigNum, diff --git a/primitives/src/channel_validator.rs b/primitives/src/channel_validator.rs index f8ef7acee..18b1e91e6 100644 --- a/primitives/src/channel_validator.rs +++ b/primitives/src/channel_validator.rs @@ -6,6 +6,9 @@ use chrono::Utc; use std::cmp::PartialEq; use time::Duration; +// +// TODO: AIP#61 How relevant is this validator? 
Check and remove if it's obsolete +// pub trait ChannelValidator { fn is_channel_valid( config: &Config, @@ -54,7 +57,7 @@ pub trait ChannelValidator { return Err(ChannelError::MinimumDepositNotMet); } - if adapter_channel_validator.fee < config.minimal_fee { + if BigNum::from(adapter_channel_validator.fee.to_u64()) < config.minimal_fee { return Err(ChannelError::MinimumValidatorFeeNotMet); } @@ -62,7 +65,7 @@ pub trait ChannelValidator { .spec .validators .iter() - .map(|v| v.fee.clone()) + .map(|v| BigNum::from(v.fee.to_u64())) .fold(BigNum::from(0), |acc, x| acc + x); if total_validator_fee >= channel.deposit_amount { diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index d9d9f3eea..83a82ac1e 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -8,6 +8,7 @@ pub use self::{ address::Address, balances_map::BalancesMap, big_num::BigNum, + campaign::{Campaign, CampaignId}, channel::{Channel, ChannelId, ChannelSpec, SpecValidator, SpecValidators}, config::Config, event_submission::EventSubmission, @@ -34,6 +35,7 @@ pub mod ipfs; pub mod market; pub mod merkle_tree; pub mod sentry; +pub mod spender; pub mod supermarket; pub mod targeting; mod unified_num; diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 62da64a76..ca999aa84 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -1,8 +1,7 @@ use crate::{ targeting::Rules, - validator::Type as MessageType, - validator::{ApproveState, Heartbeat, MessageTypes, NewState}, - BigNum, Channel, ChannelId, ValidatorId, + validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}, + Address, BalancesMap, BigNum, Channel, ChannelId, ValidatorId, IPFS, }; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; @@ -114,21 +113,31 @@ pub mod message { pub enum Event { #[serde(rename_all = "camelCase")] Impression { - publisher: ValidatorId, - ad_unit: Option, - ad_slot: Option, + publisher: Address, + ad_unit: Option, + ad_slot: Option, referrer: Option, }, + #[serde(rename_all = "camelCase")] Click { - publisher: ValidatorId, - ad_unit: Option, - ad_slot: Option, + publisher: Address, + ad_unit: Option, + ad_slot: Option, referrer: Option, }, /// only the creator can send this event + #[serde(rename_all = "camelCase")] UpdateTargeting { targeting_rules: Rules }, + /// Closes the `Campaign` /// only the creator can send this event + #[serde(rename_all = "camelCase")] Close, + /// TODO: AIP#61 Check and explain who can send this event as well as when it can be received + /// A map of earners which gets merged in the `spender::Aggregate` + /// NOTE: Does **not** contain any fees! + /// This even can be used to pay to yourself, but this is irrelevant as it's your funds you are paying yourself. + #[serde(rename_all = "camelCase")] + Pay { payout: BalancesMap }, } impl Event { @@ -139,19 +148,30 @@ impl Event { pub fn is_impression_event(&self) -> bool { matches!(self, Event::Impression { .. }) } + + pub fn as_str(&self) -> &str { + self.as_ref() + } } -impl fmt::Display for Event { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl AsRef for Event { + fn as_ref(&self) -> &str { match *self { - Event::Impression { .. } => write!(f, "IMPRESSION"), - Event::Click { .. } => write!(f, "CLICK"), - Event::UpdateTargeting { .. } => write!(f, "UPDATE_TARGETING"), - Event::Close => write!(f, "CLOSE"), + Event::Impression { .. } => "IMPRESSION", + Event::Click { .. } => "CLICK", + Event::UpdateTargeting { .. 
} => "UPDATE_TARGETING", + Event::Close => "CLOSE", + Event::Pay { .. } => "PAY", } } } +impl fmt::Display for Event { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_ref()) + } +} + #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] pub struct Earner { #[serde(rename = "publisher")] @@ -171,8 +191,8 @@ pub struct EventAggregate { #[serde(rename_all = "camelCase")] pub struct AggregateEvents { #[serde(default, skip_serializing_if = "Option::is_none")] - pub event_counts: Option>, - pub event_payouts: HashMap, + pub event_counts: Option>, + pub event_payouts: HashMap, } #[derive(Debug, Serialize, Deserialize)] @@ -365,3 +385,33 @@ mod postgres { to_sql_checked!(); } } + +#[cfg(test)] +mod test { + use super::*; + use crate::util::tests::prep_db::{ADDRESSES, DUMMY_IPFS}; + use serde_json::json; + + #[test] + pub fn de_serialize_events() { + let click = Event::Click { + publisher: ADDRESSES["publisher"], + ad_unit: Some(DUMMY_IPFS[0].clone()), + ad_slot: Some(DUMMY_IPFS[1].clone()), + referrer: Some("some_referrer".to_string()), + }; + + let click_json = json!({ + "type": "CLICK", + "publisher": "0xB7d3F81E857692d13e9D63b232A90F4A1793189E", + "adUnit": "QmcUVX7fvoLMM93uN2bD3wGTH8MXSxeL8hojYfL2Lhp7mR", + "adSlot": "Qmasg8FrbuSQpjFu3kRnZF9beg8rEBFrqgi1uXDRwCbX5f", + "referrer": "some_referrer" + }); + + pretty_assertions::assert_eq!( + click_json, + serde_json::to_value(click).expect("should serialize") + ); + } +} diff --git a/primitives/src/targeting.rs b/primitives/src/targeting.rs index 45367c555..159868473 100644 --- a/primitives/src/targeting.rs +++ b/primitives/src/targeting.rs @@ -1,4 +1,4 @@ -use crate::{channel::Pricing, BigNum, Channel}; +use crate::{campaign::Pricing, BigNum, Campaign}; pub use eval::*; use serde_json::Number; @@ -9,25 +9,15 @@ pub use input::{field::GetField, Input}; mod eval; pub mod input; -pub fn get_pricing_bounds(channel: &Channel, event_type: &str) -> Pricing { - channel - .spec +pub fn get_pricing_bounds(campaign: &Campaign, event_type: &str) -> Pricing { + campaign .pricing_bounds .as_ref() .and_then(|pricing_bounds| pricing_bounds.get(event_type)) .cloned() - .unwrap_or_else(|| { - if event_type == "IMPRESSION" { - Pricing { - min: channel.spec.min_per_impression.clone().max(1.into()), - max: channel.spec.max_per_impression.clone().max(1.into()), - } - } else { - Pricing { - min: 0.into(), - max: 0.into(), - } - } + .unwrap_or_else(|| Pricing { + min: 0.into(), + max: 0.into(), }) } @@ -67,9 +57,9 @@ impl Output { } } -impl From<&Channel> for Output { - fn from(channel: &Channel) -> Self { - let price = match &channel.spec.pricing_bounds { +impl From<&Campaign> for Output { + fn from(campaign: &Campaign) -> Self { + let price = match &campaign.pricing_bounds { Some(pricing_bounds) => pricing_bounds .to_vec() .into_iter() @@ -112,11 +102,11 @@ mod test { #[test] fn test_output_from_channel() { - use crate::channel::{Pricing, PricingBounds}; - use crate::util::tests::prep_db::DUMMY_CHANNEL; + use crate::campaign::{Pricing, PricingBounds}; + use crate::util::tests::prep_db::DUMMY_CAMPAIGN; - let mut channel = DUMMY_CHANNEL.clone(); - channel.spec.pricing_bounds = Some(PricingBounds { + let mut campaign = DUMMY_CAMPAIGN.clone(); + campaign.pricing_bounds = Some(PricingBounds { impression: Some(Pricing { min: 1_000.into(), max: 2_000.into(), @@ -127,7 +117,7 @@ mod test { }), }); - let output = Output::from(&channel); + let output = Output::from(&campaign); assert_eq!(true, output.show); assert_eq!(1.0, output.boost); 
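The reworked `get_pricing_bounds` above now takes a `&Campaign` and, when the campaign defines no `pricing_bounds` entry for the requested event type, falls back to a zero-valued `Pricing` instead of the old `min_per_impression`/`max_per_impression` defaults, since per-event pricing now lives entirely on the `Campaign`. A minimal sketch of that fallback (not part of the patch; it reuses the `DUMMY_CAMPAIGN` fixture from `prep_db`, and the exact import paths are assumed):

    use primitives::{
        campaign::Pricing,
        targeting::get_pricing_bounds,
        util::tests::prep_db::DUMMY_CAMPAIGN,
    };

    fn pricing_fallback_sketch() {
        // A campaign without explicit pricing bounds...
        let mut campaign = DUMMY_CAMPAIGN.clone();
        campaign.pricing_bounds = None;

        // ...resolves every event type to min = max = 0, rather than the
        // Channel-era per-impression defaults.
        let zero = Pricing { min: 0.into(), max: 0.into() };
        assert_eq!(zero, get_pricing_bounds(&campaign, "IMPRESSION"));
        assert_eq!(zero, get_pricing_bounds(&campaign, "CLICK"));
    }
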
diff --git a/primitives/src/targeting/eval.rs b/primitives/src/targeting/eval.rs index dacc69bb2..f15b223c8 100644 --- a/primitives/src/targeting/eval.rs +++ b/primitives/src/targeting/eval.rs @@ -1,4 +1,4 @@ -use crate::BigNum; +use crate::{Address, BigNum}; use lazy_static::lazy_static; use serde::{Deserialize, Serialize}; use serde_json::{value::Value as SerdeValue, Number}; @@ -13,7 +13,7 @@ use std::{ pub use rules::Rules; use super::{ - input::{channel::Getter as ChannelGetter, Get}, + input::{campaign::Getter as ChannelGetter, Get}, Input, Output, }; @@ -26,16 +26,22 @@ pub enum Error { TypeError, UnknownVariable, } -pub const DAI_ADDR: &str = "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359"; -pub const USDT_ADDR: &str = "0xdac17f958d2ee523a2206206994597c13d831ec7"; -pub const USDC_ADDR: &str = "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"; lazy_static! { - pub static ref DEPOSIT_ASSETS_MAP: HashMap = { + pub static ref DAI_ADDR: Address = "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359" + .parse() + .expect("Valid Address"); + pub static ref USDT_ADDR: Address = "0xdac17f958d2ee523a2206206994597c13d831ec7" + .parse() + .expect("Valid Address"); + pub static ref USDC_ADDR: Address = "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48" + .parse() + .expect("Valid Address"); + pub static ref DEPOSIT_ASSETS_MAP: HashMap = { let mut assets = HashMap::new(); - assets.insert(DAI_ADDR.into(), BigNum::from(10u64.pow(18))); - assets.insert(USDT_ADDR.into(), BigNum::from(10u64.pow(6))); - assets.insert(USDC_ADDR.into(), BigNum::from(10u64.pow(18))); + assets.insert(*DAI_ADDR, BigNum::from(10u64.pow(18))); + assets.insert(*USDT_ADDR, BigNum::from(10u64.pow(6))); + assets.insert(*USDC_ADDR, BigNum::from(10u64.pow(18))); assets }; @@ -996,13 +1002,17 @@ fn eval(input: &Input, output: &mut Output, rule: &Rule) -> Result // if there is no way to get the deposit_asset, then fail with UnknownVariable // since we can't calculate the price in USD - let deposit_asset = match &input.channel { - Some(Get::Getter(ChannelGetter::Full(full_channel))) => { - Ok(full_channel.channel.deposit_asset.clone()) - } - Some(Get::Getter(ChannelGetter::Market(channel))) => { - Ok(channel.deposit_asset.clone()) + let deposit_asset = match &input.campaign { + Some(Get::Getter(ChannelGetter::Full(full_campaign))) => { + Ok(full_campaign.campaign.channel.token) } + // + // TODO: AIP#61 Replace with Campaign + // + Some(Get::Getter(ChannelGetter::Market(channel))) => channel + .deposit_asset + .parse::
() + .map_err(|_| Error::TypeError), // In case of a Values - we don't have the deposit_asset on hand so we fail in that case // In case of None we also fail _ => Err(Error::UnknownVariable), diff --git a/primitives/src/targeting/eval_test.rs b/primitives/src/targeting/eval_test.rs index 8352d9ddc..e756b5bca 100644 --- a/primitives/src/targeting/eval_test.rs +++ b/primitives/src/targeting/eval_test.rs @@ -3,7 +3,7 @@ use chrono::{TimeZone, Utc}; use super::*; use crate::{ targeting::input, - util::tests::prep_db::{DUMMY_CHANNEL, DUMMY_IPFS, IDS}, + util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_IPFS}, BalancesMap, }; @@ -19,14 +19,14 @@ fn get_default_input() -> Input { global: input::Global { ad_slot_id: "ad_slot_id Value".to_string(), ad_slot_type: "ad_slot_type Value".to_string(), - publisher_id: IDS["leader"], + publisher_id: ADDRESSES["leader"], country: Some("bg".to_string()), event_type: "IMPRESSION".to_string(), seconds_since_epoch: Utc.ymd(2020, 11, 06).and_hms(12, 0, 0), user_agent_os: Some("os".to_string()), user_agent_browser_family: Some("family".to_string()), }, - channel: None, + campaign: None, balances: None, ad_unit_id: Some(DUMMY_IPFS[0].clone()), ad_slot: None, @@ -34,7 +34,7 @@ fn get_default_input() -> Input { // Set the Channel, Balances and AdUnit for the Input init_input - .with_channel(DUMMY_CHANNEL.clone()) + .with_campaign(DUMMY_CAMPAIGN.clone()) .with_balances(input_balances) } @@ -270,11 +270,11 @@ mod dsl_test { #[test] fn test_set_eval() { - use crate::channel::{Pricing, PricingBounds}; - use crate::util::tests::prep_db::DUMMY_CHANNEL; + use crate::campaign::{Pricing, PricingBounds}; + use crate::util::tests::prep_db::DUMMY_CAMPAIGN; - let mut channel = DUMMY_CHANNEL.clone(); - channel.spec.pricing_bounds = Some(PricingBounds { + let mut campaign = DUMMY_CAMPAIGN.clone(); + campaign.pricing_bounds = Some(PricingBounds { impression: Some(Pricing { min: 1_000.into(), max: 2_000.into(), @@ -286,7 +286,7 @@ mod dsl_test { }); let input = get_default_input(); - let mut output = Output::from(&channel); + let mut output = Output::from(&campaign); assert_eq!(Some(&BigNum::from(1_000)), output.price.get("IMPRESSION")); @@ -1376,9 +1376,9 @@ mod string_and_array { price: Default::default(), }; for (key, value) in &*DEPOSIT_ASSETS_MAP { - let mut asset_channel = DUMMY_CHANNEL.clone(); - asset_channel.deposit_asset = key.to_string(); - let input = get_default_input().with_channel(asset_channel); + let mut asset_campaign = DUMMY_CAMPAIGN.clone(); + asset_campaign.channel.token = *key; + let input = get_default_input().with_campaign(asset_campaign); let amount_crypto = BigNum::from(100).mul(value); let amount_usd = Some(Value::Number( diff --git a/primitives/src/targeting/input.rs b/primitives/src/targeting/input.rs index 1fcb43239..6cdb03b5f 100644 --- a/primitives/src/targeting/input.rs +++ b/primitives/src/targeting/input.rs @@ -1,5 +1,5 @@ use super::{Error, Value}; -use crate::{ToETHChecksum, ValidatorId, IPFS}; +use crate::{Address, ToETHChecksum, IPFS}; use chrono::{serde::ts_seconds, DateTime, Utc}; use serde::{Deserialize, Serialize}; @@ -34,7 +34,7 @@ pub struct Input { #[serde(flatten)] pub global: Global, #[serde(flatten)] - pub channel: Option, + pub campaign: Option, #[serde(flatten)] pub balances: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -46,8 +46,10 @@ pub struct Input { impl Input { /// Sets the Channel Getter - pub fn with_channel(mut self, channel: crate::Channel) -> Self { - self.channel = 
Some(Get::Getter(channel::Getter::from_channel(&self, channel))); + pub fn with_campaign(mut self, campaign: crate::Campaign) -> Self { + self.campaign = Some(Get::Getter(campaign::Getter::from_campaign( + &self, campaign, + ))); self } @@ -56,7 +58,7 @@ impl Input { mut self, channel: crate::supermarket::units_for_slot::response::Channel, ) -> Self { - self.channel = Some(Get::Getter(channel::Getter::from_market(channel))); + self.campaign = Some(Get::Getter(campaign::Getter::from_market(channel))); self } @@ -104,7 +106,7 @@ impl GetField for Input { match field { Field::AdView(ad_view) => self.ad_view.get(ad_view), Field::Global(global) => self.global.get(global), - Field::Channel(channel) => self.channel.get(channel).flatten(), + Field::Channel(channel) => self.campaign.get(channel).flatten(), Field::Balances(balances) => self.balances.get(balances), Field::AdSlot(ad_slot) => self.ad_slot.get(ad_slot).flatten(), Field::AdUnit(ad_unit) => match ad_unit { @@ -146,7 +148,7 @@ impl GetField for AdView { pub struct Global { pub ad_slot_id: String, pub ad_slot_type: String, - pub publisher_id: ValidatorId, + pub publisher_id: Address, pub country: Option, pub event_type: String, #[serde(with = "ts_seconds")] @@ -209,19 +211,19 @@ impl GetField for AdSlot { } } -pub mod channel { +pub mod campaign { use serde::Deserialize; use super::{field, Get, GetField, Value}; - use crate::{targeting::get_pricing_bounds, BigNum, ChannelId, ValidatorId}; + use crate::{targeting::get_pricing_bounds, Address, BigNum, CampaignId, ToHex}; - pub type GetChannel = Get; + pub type GetCampaign = Get; #[derive(Debug, Clone, Deserialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Values { - pub advertiser_id: ValidatorId, - pub campaign_id: ChannelId, + pub advertiser_id: Address, + pub campaign_id: CampaignId, pub campaign_seconds_active: u64, pub campaign_seconds_duration: u64, pub campaign_budget: BigNum, @@ -230,27 +232,30 @@ pub mod channel { } #[derive(Debug, Clone, PartialEq)] - pub struct FullChannel { - pub channel: crate::Channel, + pub struct FullCampaign { + pub campaign: crate::Campaign, pub(super) event_type: String, } #[derive(Debug, Clone, PartialEq)] /// The Getter for a Field that requires Channel can be either: - /// - a Full Channel + /// - a Full Campaign /// - a Channel coming from the Supermarket - /// Since only the Full Channel can get the pricing bounds, - /// we wrap the Channel as well as the event_type of the Input + /// Since only the Full Campaign can get the pricing bounds, + /// we wrap the Campaign as well as the event_type of the Input pub enum Getter { - Full(FullChannel), + Full(FullCampaign), + // + // TODO: AIP#61 Change to Campaign + // Market(crate::supermarket::units_for_slot::response::Channel), } impl Getter { /// Input is used to set the Event Type of the Getter - pub fn from_channel(input: &super::Input, channel: crate::Channel) -> Self { - Self::Full(FullChannel { - channel, + pub fn from_campaign(input: &super::Input, campaign: crate::Campaign) -> Self { + Self::Full(FullCampaign { + campaign, event_type: input.global.event_type.clone(), }) } @@ -268,27 +273,27 @@ pub mod channel { match field { field::Channel::AdvertiserId => Some(Value::String(match self { Get::Getter(getter) => match getter { - Getter::Full(FullChannel { channel, .. }) => { - channel.creator.to_hex_prefix_string() + Getter::Full(FullCampaign { campaign, .. 
}) => { + campaign.creator.to_hex_prefixed() } - Getter::Market(s_channel) => s_channel.creator.to_hex_prefix_string(), + Getter::Market(s_channel) => s_channel.creator.to_hex_prefixed(), }, - Get::Value(Values { advertiser_id, .. }) => { - advertiser_id.to_hex_prefix_string() - } + Get::Value(Values { advertiser_id, .. }) => advertiser_id.to_hex_prefixed(), })), field::Channel::CampaignId => Some(Value::String(match self { Get::Getter(getter) => match getter { - Getter::Full(FullChannel { channel, .. }) => channel.id.to_string(), - Getter::Market(s_channel) => s_channel.id.to_string(), + Getter::Full(FullCampaign { campaign, .. }) => { + campaign.id.to_hex_prefixed() + } + Getter::Market(s_channel) => s_channel.id.to_hex_prefixed(), }, - Get::Value(Values { campaign_id, .. }) => campaign_id.to_string(), + Get::Value(Values { campaign_id, .. }) => campaign_id.to_hex_prefixed(), })), field::Channel::CampaignSecondsActive => Some(Value::Number(match self { Get::Getter(getter) => { let (active_from, created) = match getter { - Getter::Full(FullChannel { channel, .. }) => { - (channel.spec.active_from, channel.spec.created) + Getter::Full(FullCampaign { campaign, .. }) => { + (campaign.active.from, campaign.created) } Getter::Market(s_channel) => { (s_channel.spec.active_from, s_channel.spec.created) @@ -311,12 +316,10 @@ pub mod channel { })), field::Channel::CampaignSecondsDuration => Some(Value::Number(match self { Get::Getter(getter) => { - let (withdraw_period_start, active_from, created) = match getter { - Getter::Full(FullChannel { channel, .. }) => ( - channel.spec.withdraw_period_start, - channel.spec.active_from, - channel.spec.created, - ), + let (active_to, active_from, created) = match getter { + Getter::Full(FullCampaign { campaign, .. }) => { + (campaign.active.to, campaign.active.from, campaign.created) + } Getter::Market(s_channel) => ( s_channel.spec.withdraw_period_start, s_channel.spec.active_from, @@ -324,7 +327,7 @@ pub mod channel { ), }; - let duration = withdraw_period_start - active_from.unwrap_or(created); + let duration = active_to - active_from.unwrap_or(created); let seconds = duration .to_std() @@ -340,7 +343,7 @@ pub mod channel { })), field::Channel::CampaignBudget => Some(Value::BigNum(match self { Get::Getter(getter) => match getter { - Getter::Full(FullChannel { channel, .. }) => channel.deposit_amount.clone(), + Getter::Full(FullCampaign { campaign, .. 
}) => campaign.budget.to_bignum(), Getter::Market(s_channel) => s_channel.deposit_amount.clone(), }, Get::Value(Values { @@ -348,10 +351,10 @@ pub mod channel { }) => campaign_budget.clone(), })), field::Channel::EventMinPrice => match self { - Get::Getter(Getter::Full(FullChannel { - channel, + Get::Getter(Getter::Full(FullCampaign { + campaign, event_type, - })) => Some(Value::BigNum(get_pricing_bounds(channel, event_type).min)), + })) => Some(Value::BigNum(get_pricing_bounds(campaign, event_type).min)), // The supermarket Channel, does not have enough information to return the event_min_price Get::Getter(Getter::Market(_)) => None, Get::Value(Values { @@ -359,10 +362,10 @@ pub mod channel { }) => event_min_price.clone().map(Value::BigNum), }, field::Channel::EventMaxPrice => match self { - Get::Getter(Getter::Full(FullChannel { - channel, + Get::Getter(Getter::Full(FullCampaign { + campaign, event_type, - })) => Some(Value::BigNum(get_pricing_bounds(channel, event_type).max)), + })) => Some(Value::BigNum(get_pricing_bounds(campaign, event_type).max)), // The supermarket Channel, does not have enough information to return the event_max_price Get::Getter(Getter::Market(_)) => None, Get::Value(Values { @@ -376,7 +379,7 @@ pub mod channel { pub mod balances { use super::{field, Get, GetField, Value}; - use crate::{BalancesMap, BigNum, ValidatorId}; + use crate::{Address, BalancesMap, BigNum}; use serde::Deserialize; pub type GetBalances = Get; @@ -391,7 +394,7 @@ pub mod balances { #[derive(Debug, Clone, PartialEq)] pub struct Getter { pub balances: BalancesMap, - pub(super) publisher_id: ValidatorId, + pub(super) publisher_id: Address, } impl GetField for Get { @@ -426,7 +429,7 @@ pub mod balances { mod test { use super::*; pub use crate::{ - util::tests::prep_db::{DUMMY_CHANNEL as CHANNEL, DUMMY_IPFS as IPFS, IDS}, + util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN as CAMPAIGN, DUMMY_IPFS as IPFS}, AdUnit, BalancesMap, }; use chrono::{TimeZone, Utc}; @@ -451,11 +454,11 @@ mod test { // Global scope, accessible everywhere, campaign-dependant "adUnitId": "Qmasg8FrbuSQpjFu3kRnZF9beg8rEBFrqgi1uXDRwCbX5f", "advertiserId": "0x033ed90e0fec3f3ea1c9b005c724d704501e0196", - "campaignId": "0x061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088", + "campaignId": "0x936da01f9abd4d9d80c702af85c822a8", "campaignTotalSpent": "40", "campaignSecondsActive": 40633521, "campaignSecondsDuration": 2509030800_u64, - "campaignBudget": "1000", + "campaignBudget": "100000000000", "eventMinPrice": "1", "eventMaxPrice": "10", "publisherEarnedFromCampaign": "30", @@ -467,9 +470,12 @@ mod test { let actual_date = Utc.ymd(2020, 6, 6).and_hms(12, 0, 0); - let balances: BalancesMap = vec![(IDS["publisher"], 30.into()), (IDS["leader"], 10.into())] - .into_iter() - .collect(); + let balances: BalancesMap = vec![ + (ADDRESSES["publisher"], 30.into()), + (ADDRESSES["leader"], 10.into()), + ] + .into_iter() + .collect(); let full_input = Input { ad_view: Some(AdView { @@ -480,7 +486,7 @@ mod test { global: Global { ad_slot_id: IPFS[0].to_string(), ad_slot_type: "legacy_300x100".into(), - publisher_id: IDS["publisher"], + publisher_id: ADDRESSES["publisher"], country: Some("BG".into()), event_type: "IMPRESSION".into(), seconds_since_epoch: actual_date, @@ -488,18 +494,28 @@ mod test { user_agent_browser_family: Some("Firefox".into()), }, // Channel can only be tested with a Value, since the campaign_seconds_* are calculated based on current DateTime - channel: Some(Get::Value(channel::Values { - advertiser_id: 
CHANNEL.creator, - campaign_id: CHANNEL.id, + campaign: Some(Get::Value(campaign::Values { + advertiser_id: CAMPAIGN.creator, + campaign_id: CAMPAIGN.id, campaign_seconds_active: 40633521, campaign_seconds_duration: 2509030800, - campaign_budget: CHANNEL.deposit_amount.clone(), - event_min_price: Some(CHANNEL.spec.min_per_impression.clone()), - event_max_price: Some(CHANNEL.spec.max_per_impression.clone()), + campaign_budget: CAMPAIGN.budget.to_bignum(), + event_min_price: Some( + CAMPAIGN + .pricing("IMPRESSION") + .map(|price| price.min.clone()) + .expect("should have price"), + ), + event_max_price: Some( + CAMPAIGN + .pricing("IMPRESSION") + .map(|price| price.max.clone()) + .expect("Should have price"), + ), })), balances: Some(Get::Getter(balances::Getter { balances, - publisher_id: IDS["publisher"], + publisher_id: ADDRESSES["publisher"], })), ad_unit_id: Some(IPFS[1].clone()), ad_slot: Some(AdSlot { diff --git a/primitives/src/targeting/input/field.rs b/primitives/src/targeting/input/field.rs index 835d92899..715ad0ac6 100644 --- a/primitives/src/targeting/input/field.rs +++ b/primitives/src/targeting/input/field.rs @@ -148,6 +148,7 @@ impl Into for Channel { #[display(style = "camelCase")] pub enum Balances { CampaignTotalSpent, + // TODO: AIP#61 Should be dropped since we can't know PublisherEarnedFromCampaign, } diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index 38ed974fd..e3fc07392 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -1,13 +1,15 @@ -use num::{traits::CheckedRem, pow::Pow, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Integer, One}; +use crate::BigNum; +use num::{ + pow::Pow, traits::CheckedRem, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Integer, One, +}; use num_derive::{FromPrimitive, Num, NumCast, NumOps, ToPrimitive, Zero}; +use serde::{Deserialize, Serialize}; use std::{ cmp::Ordering, fmt, iter::Sum, ops::{Add, AddAssign, Div, Mul, Sub}, }; -use serde::{Serialize, Deserialize}; -use crate::BigNum; /// Unified Number with a precision of 8 digits after the decimal point. 
/// @@ -32,7 +34,7 @@ use crate::BigNum; PartialOrd, Ord, Serialize, - Deserialize + Deserialize, )] #[serde(transparent)] pub struct UnifiedNum(u64); @@ -48,6 +50,10 @@ impl UnifiedNum { self.0 } + pub fn to_bignum(&self) -> BigNum { + BigNum::from(self.0) + } + pub fn checked_add(&self, rhs: &UnifiedNum) -> Option { CheckedAdd::checked_add(self, rhs) } @@ -292,7 +298,10 @@ mod test { assert_eq!("0.00000001", &smallest_value.to_string()); assert_eq!("1449030.00567000", &random_value.to_string()); - assert_eq!(serde_json::Value::Number(100_000_000.into()), serde_json::to_value(one).expect("Should serialize")) + assert_eq!( + serde_json::Value::Number(100_000_000.into()), + serde_json::to_value(one).expect("Should serialize") + ) } #[test] diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index f17a7008a..d644c18dc 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -1,7 +1,9 @@ use serde::{Deserialize, Serialize}; use std::{convert::TryFrom, fmt, str::FromStr}; -use crate::{address::Error, targeting::Value, Address, BigNum, DomainError, ToETHChecksum, ToHex}; +use crate::{ + address::Error, targeting::Value, Address, DomainError, ToETHChecksum, ToHex, UnifiedNum, +}; pub use messages::*; @@ -45,6 +47,12 @@ impl ValidatorId { impl ToETHChecksum for ValidatorId {} +impl From<&Address> for ValidatorId { + fn from(address: &Address) -> Self { + Self(*address) + } +} + impl From<&[u8; 20]> for ValidatorId { fn from(bytes: &[u8; 20]) -> Self { Self(Address::from(bytes)) @@ -99,9 +107,9 @@ impl TryFrom for ValidatorId { pub struct ValidatorDesc { pub id: ValidatorId, #[serde(default, skip_serializing_if = "Option::is_none")] - pub fee_addr: Option, + pub fee_addr: Option
, pub url: String, - pub fee: BigNum, + pub fee: UnifiedNum, } /// Validator Message Types @@ -251,10 +259,8 @@ pub mod messages { #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct Accounting { - #[serde(rename = "lastEvAggr")] - pub last_event_aggregate: DateTime, - pub balances_before_fees: BalancesMap, pub balances: BalancesMap, + pub last_aggregate: DateTime, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] @@ -263,6 +269,9 @@ pub mod messages { pub state_root: String, pub signature: String, pub is_healthy: bool, + // + // TODO: AIP#61 Remove exhausted property + // #[serde(default)] pub exhausted: bool, } @@ -273,6 +282,9 @@ pub mod messages { pub state_root: String, pub signature: String, pub balances: BalancesMap, + // + // TODO: AIP#61 Remove exhausted property + // #[serde(default)] pub exhausted: bool, } diff --git a/sentry/src/access.rs b/sentry/src/access.rs index 07dd7ea97..b1820e1ff 100644 --- a/sentry/src/access.rs +++ b/sentry/src/access.rs @@ -215,7 +215,7 @@ mod test { event_submission::{RateLimit, Rule}, sentry::Event, targeting::Rules, - util::tests::prep_db::{DUMMY_CHANNEL, IDS}, + util::tests::prep_db::{ADDRESSES, DUMMY_CHANNEL, IDS}, Channel, Config, EventSubmission, }; @@ -248,7 +248,7 @@ mod test { fn get_impression_events(count: i8) -> Vec { (0..count) .map(|_| Event::Impression { - publisher: IDS["publisher2"], + publisher: ADDRESSES["publisher2"], ad_unit: None, ad_slot: None, referrer: None, @@ -559,7 +559,7 @@ mod test { channel.creator = IDS["leader"]; let mixed_events = vec![ Event::Impression { - publisher: IDS["publisher2"], + publisher: ADDRESSES["publisher2"], ad_unit: None, ad_slot: None, referrer: None, @@ -609,7 +609,7 @@ mod test { channel.creator = IDS["leader"]; let mixed_events = vec![ Event::Impression { - publisher: IDS["publisher2"], + publisher: ADDRESSES["publisher2"], ad_unit: None, ad_slot: None, referrer: None, diff --git a/sentry/src/analytics_recorder.rs b/sentry/src/analytics_recorder.rs index f5999e57b..a7e3b2cfc 100644 --- a/sentry/src/analytics_recorder.rs +++ b/sentry/src/analytics_recorder.rs @@ -46,13 +46,13 @@ pub async fn record( if let Some(ad_unit) = ad_unit { db.zincr( format!("{}:{}:{}", PublisherReport::AdUnit, event, publisher), - ad_unit, + ad_unit.to_string(), 1, ) .ignore(); db.zincr( format!("{}:{}:{}", ChannelReport::AdUnit, event, publisher), - ad_unit, + ad_unit.to_string(), 1, ) .ignore(); @@ -61,13 +61,13 @@ pub async fn record( if let Some(ad_slot) = ad_slot { db.zincr( format!("{}:{}:{}", PublisherReport::AdSlot, event, publisher), - ad_slot, + ad_slot.to_string(), 1, ) .ignore(); db.zincr( format!("{}:{}:{}", PublisherReport::AdSlotPay, event, publisher), - ad_slot, + ad_slot.to_string(), pay_amount, ) .ignore(); diff --git a/sentry/src/db/event_aggregate.rs b/sentry/src/db/event_aggregate.rs index faf55e4d5..4cb2d92fa 100644 --- a/sentry/src/db/event_aggregate.rs +++ b/sentry/src/db/event_aggregate.rs @@ -10,7 +10,7 @@ use futures::pin_mut; use primitives::{ sentry::{EventAggregate, MessageResponse}, validator::{ApproveState, Heartbeat, NewState}, - BigNum, Channel, ChannelId, ValidatorId, + Address, BigNum, Channel, ChannelId, ValidatorId, }; use std::{convert::TryFrom, ops::Add}; @@ -147,7 +147,7 @@ pub async fn list_event_aggregates( struct EventData { id: ChannelId, event_type: String, - earner: Option, + earner: Option
, event_count: BigNum, event_payout: BigNum, } diff --git a/sentry/src/event_aggregator.rs b/sentry/src/event_aggregator.rs index 8f4a83b9e..f2a9e4306 100644 --- a/sentry/src/event_aggregator.rs +++ b/sentry/src/event_aggregator.rs @@ -3,8 +3,11 @@ use crate::access::Error as AccessError; use crate::db::event_aggregate::insert_event_aggregate; use crate::db::DbPool; use crate::db::{get_channel_by_id, update_targeting_rules}; -use crate::event_reducer; -use crate::payout::get_payout; +// +// TODO: AIP#61 Event Aggregator should be replaced with the Spender aggregator & Event Analytics +// +// use crate::event_reducer; +// use crate::payout::get_payout; use crate::Application; use crate::ResponseError; use crate::Session; @@ -155,24 +158,27 @@ impl EventAggregator { update_targeting_rules(&app.pool, &channel_id, &new_rules).await?; } + // + // TODO: AIP#61 Events & payouts should be separated in to Analytics & Spender Aggregator + // // Pre-computing all payouts once - let events_with_payout: Vec<(Event, Option<(ValidatorId, BigNum)>)> = events - .iter() - .filter(|ev| ev.is_click_event() || ev.is_impression_event()) - .map(|ev| { - let payout = match get_payout(&app.logger, &record.channel, &ev, &session) { - Ok(payout) => payout, - Err(err) => return Err(err), - }; - - match event_reducer::reduce(&record.channel, &mut record.aggregate, &ev, &payout) { - Ok(_) => {} - Err(err) => error!(&app.logger, "Event Reducred failed"; "error" => ?err), - } - - Ok((ev.clone(), payout)) - }) - .collect::>()?; + let events_with_payout: Vec<(Event, Option<(ValidatorId, BigNum)>)> = vec![]; /* events + .iter() + .filter(|ev| ev.is_click_event() || ev.is_impression_event()) + .map(|ev| { + let payout = match get_payout(&app.logger, &record.channel, &ev, &session) { + Ok(payout) => payout, + Err(err) => return Err(err), + }; + + match event_reducer::reduce(&record.channel, &mut record.aggregate, &ev, &payout) { + Ok(_) => {} + Err(err) => error!(&app.logger, "Event Reducred failed"; "error" => ?err), + } + + Ok((ev.clone(), payout)) + }) + .collect::>()?; */ // We don't want to save empty aggregates if record.aggregate.events.is_empty() { diff --git a/sentry/src/event_reducer.rs b/sentry/src/event_reducer.rs index 861238d10..cc7b5a8ee 100644 --- a/sentry/src/event_reducer.rs +++ b/sentry/src/event_reducer.rs @@ -1,13 +1,17 @@ use primitives::{ sentry::{AggregateEvents, Event, EventAggregate}, - BigNum, Channel, ValidatorId, + Address, BigNum, Channel, }; +// +// TODO: AIP#61 remove `allow(dead_code)` and see what should be changed for Spender Aggregate +// +#[allow(dead_code)] pub(crate) fn reduce( channel: &Channel, initial_aggr: &mut EventAggregate, ev: &Event, - payout: &Option<(ValidatorId, BigNum)>, + payout: &Option<(Address, BigNum)>, ) -> Result<(), Box> { let event_type = ev.to_string(); @@ -36,8 +40,12 @@ pub(crate) fn reduce( } Event::Close => { let close_event = AggregateEvents { - event_counts: Some(vec![(channel.creator, 1.into())].into_iter().collect()), - event_payouts: vec![(channel.creator, channel.deposit_amount.clone())] + event_counts: Some( + vec![(channel.creator.to_address(), 1.into())] + .into_iter() + .collect(), + ), + event_payouts: vec![(channel.creator.to_address(), channel.deposit_amount.clone())] .into_iter() .collect(), }; @@ -52,7 +60,7 @@ pub(crate) fn reduce( /// payable_event is either an IMPRESSION or a CLICK fn merge_payable_event( payable_event: Option<&AggregateEvents>, - payout: (ValidatorId, BigNum), + payout: (Address, BigNum), ) -> AggregateEvents { let mut 
payable_event = payable_event.cloned().unwrap_or_default(); @@ -78,7 +86,7 @@ mod test { use super::*; use chrono::Utc; use primitives::{ - util::tests::prep_db::{DUMMY_CHANNEL, IDS}, + util::tests::prep_db::{ADDRESSES, DUMMY_CHANNEL}, BigNum, }; @@ -96,12 +104,12 @@ mod test { }; let event = Event::Impression { - publisher: IDS["publisher"], + publisher: ADDRESSES["publisher"], ad_unit: None, ad_slot: None, referrer: None, }; - let payout = Some((IDS["publisher"], BigNum::from(1))); + let payout = Some((ADDRESSES["publisher"], BigNum::from(1))); for i in 0..101 { reduce(&channel, &mut event_aggr, &event, &payout) .expect(&format!("Should be able to reduce event #{}", i)); @@ -118,13 +126,13 @@ mod test { .event_counts .as_ref() .expect("there should be event_counts set") - .get(&IDS["publisher"]) + .get(&ADDRESSES["publisher"]) .expect("There should be myAwesomePublisher event_counts key"); assert_eq!(event_counts, &BigNum::from(101)); let event_payouts = impression_event .event_payouts - .get(&IDS["publisher"]) + .get(&ADDRESSES["publisher"]) .expect("There should be myAwesomePublisher event_payouts key"); assert_eq!(event_payouts, &BigNum::from(101)); } diff --git a/sentry/src/payout.rs b/sentry/src/payout.rs index 29eb189fe..ccc364467 100644 --- a/sentry/src/payout.rs +++ b/sentry/src/payout.rs @@ -4,14 +4,19 @@ use primitives::{ sentry::Event, targeting::Input, targeting::{eval_with_callback, get_pricing_bounds, input, Error, Output}, - BigNum, Channel, ValidatorId, + Address, BigNum, Campaign, }; use slog::{error, Logger}; use std::cmp::{max, min}; -pub type Result = std::result::Result, Error>; +pub type Result = std::result::Result, Error>; -pub fn get_payout(logger: &Logger, channel: &Channel, event: &Event, session: &Session) -> Result { +pub fn get_payout( + logger: &Logger, + campaign: &Campaign, + event: &Event, + session: &Session, +) -> Result { let event_type = event.to_string(); match event { @@ -27,31 +32,21 @@ pub fn get_payout(logger: &Logger, channel: &Channel, event: &Event, session: &S ad_slot, .. } => { - let targeting_rules = if !channel.targeting_rules.is_empty() { - channel.targeting_rules.clone() - } else { - channel.spec.targeting_rules.clone() - }; + let targeting_rules = campaign.targeting_rules.clone(); - let pricing = get_pricing_bounds(&channel, &event_type); + let pricing = get_pricing_bounds(&campaign, &event_type); if targeting_rules.is_empty() { Ok(Some((*publisher, pricing.min))) } else { - let ad_unit = ad_unit.as_ref().and_then(|ipfs| { - channel - .spec - .ad_units - .iter() - .find(|u| &u.ipfs.to_string() == ipfs) - }); + let ad_unit = ad_unit + .as_ref() + .and_then(|ipfs| campaign.ad_units.iter().find(|u| &u.ipfs == ipfs)); let input = Input { ad_view: None, global: input::Global { - // TODO: Check this one! - ad_slot_id: ad_slot.clone().unwrap_or_default(), - // TODO: Check this one! + ad_slot_id: ad_slot.as_ref().map_or(String::new(), ToString::to_string), ad_slot_type: ad_unit.map(|u| u.ad_type.clone()).unwrap_or_default(), publisher_id: *publisher, country: session.country.clone(), @@ -60,14 +55,12 @@ pub fn get_payout(logger: &Logger, channel: &Channel, event: &Event, session: &S user_agent_os: session.os.clone(), user_agent_browser_family: None, }, - // TODO: Check this one! ad_unit_id: ad_unit.map(|unit| &unit.ipfs).cloned(), - channel: None, + campaign: None, balances: None, - // TODO: Check this one as well! 
ad_slot: None, } - .with_channel(channel.clone()); + .with_campaign(campaign.clone()); let mut output = Output { show: true, @@ -77,7 +70,7 @@ pub fn get_payout(logger: &Logger, channel: &Channel, event: &Event, session: &S .collect(), }; - let on_type_error = |error, rule| error!(logger, "Rule evaluation error for {:?}", channel.id; "error" => ?error, "rule" => ?rule); + let on_type_error = |error, rule| error!(logger, "Rule evaluation error for {:?}", campaign.id; "error" => ?error, "rule" => ?rule); eval_with_callback(&targeting_rules, &input, &mut output, Some(on_type_error)); @@ -102,22 +95,25 @@ pub fn get_payout(logger: &Logger, channel: &Channel, event: &Event, session: &S #[cfg(test)] mod test { use super::*; - use primitives::channel::{Pricing, PricingBounds}; - use primitives::util::tests::{ - discard_logger, - prep_db::{DUMMY_CHANNEL, IDS}, + use primitives::{ + campaign::{Pricing, PricingBounds}, + util::tests::{ + discard_logger, + prep_db::{ADDRESSES, DUMMY_CAMPAIGN}, + }, }; #[test] fn get_event_payouts_pricing_bounds_impression_event() { let logger = discard_logger(); - let mut channel = DUMMY_CHANNEL.clone(); - channel.deposit_amount = 100.into(); - channel.spec.min_per_impression = 8.into(); - channel.spec.max_per_impression = 64.into(); - channel.spec.pricing_bounds = Some(PricingBounds { - impression: None, + let mut campaign = DUMMY_CAMPAIGN.clone(); + campaign.budget = 100.into(); + campaign.pricing_bounds = Some(PricingBounds { + impression: Some(Pricing { + min: 8.into(), + max: 64.into(), + }), click: Some(Pricing { min: 23.into(), max: 100.into(), @@ -125,7 +121,7 @@ mod test { }); let event = Event::Impression { - publisher: IDS["leader"], + publisher: ADDRESSES["leader"], ad_unit: None, ad_slot: None, referrer: None, @@ -138,21 +134,22 @@ mod test { os: None, }; - let payout = get_payout(&logger, &channel, &event, &session).expect("Should be OK"); + let payout = get_payout(&logger, &campaign, &event, &session).expect("Should be OK"); - let expected_option = Some((IDS["leader"], 8.into())); + let expected_option = Some((ADDRESSES["leader"], 8.into())); assert_eq!(expected_option, payout, "pricingBounds: impression event"); } #[test] fn get_event_payouts_pricing_bounds_click_event() { let logger = discard_logger(); - let mut channel = DUMMY_CHANNEL.clone(); - channel.deposit_amount = 100.into(); - channel.spec.min_per_impression = 8.into(); - channel.spec.max_per_impression = 64.into(); - channel.spec.pricing_bounds = Some(PricingBounds { - impression: None, + let mut campaign = DUMMY_CAMPAIGN.clone(); + campaign.budget = 100.into(); + campaign.pricing_bounds = Some(PricingBounds { + impression: Some(Pricing { + min: 8.into(), + max: 64.into(), + }), click: Some(Pricing { min: 23.into(), max: 100.into(), @@ -160,7 +157,7 @@ mod test { }); let event = Event::Click { - publisher: IDS["leader"], + publisher: ADDRESSES["leader"], ad_unit: None, ad_slot: None, referrer: None, @@ -173,21 +170,22 @@ mod test { os: None, }; - let payout = get_payout(&logger, &channel, &event, &session).expect("Should be OK"); + let payout = get_payout(&logger, &campaign, &event, &session).expect("Should be OK"); - let expected_option = Some((IDS["leader"], 23.into())); + let expected_option = Some((ADDRESSES["leader"], 23.into())); assert_eq!(expected_option, payout, "pricingBounds: click event"); } #[test] fn get_event_payouts_pricing_bounds_close_event() { let logger = discard_logger(); - let mut channel = DUMMY_CHANNEL.clone(); - channel.deposit_amount = 100.into(); - 
channel.spec.min_per_impression = 8.into(); - channel.spec.max_per_impression = 64.into(); - channel.spec.pricing_bounds = Some(PricingBounds { - impression: None, + let mut campaign = DUMMY_CAMPAIGN.clone(); + campaign.budget = 100.into(); + campaign.pricing_bounds = Some(PricingBounds { + impression: Some(Pricing { + min: 8.into(), + max: 64.into(), + }), click: Some(Pricing { min: 23.into(), max: 100.into(), @@ -203,7 +201,7 @@ mod test { os: None, }; - let payout = get_payout(&logger, &channel, &event, &session).expect("Should be OK"); + let payout = get_payout(&logger, &campaign, &event, &session).expect("Should be OK"); assert_eq!(None, payout, "pricingBounds: click event"); } diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index 0c426d928..cfaeff9e4 100644 --- a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -205,7 +205,6 @@ pub async fn insert_events( // TODO #381: AIP#61 Spender Aggregator should be called // - app.event_aggregator .record(app, &channel_id, session, auth, events) .await?; diff --git a/validator_worker/src/core/events.rs b/validator_worker/src/core/events.rs index d47b81f74..767a85aba 100644 --- a/validator_worker/src/core/events.rs +++ b/validator_worker/src/core/events.rs @@ -4,48 +4,42 @@ use primitives::sentry::{AggregateEvents, EventAggregate}; use primitives::validator::Accounting; use primitives::{BalancesMap, BigNum, Channel, DomainError}; -use crate::core::fees::get_balances_after_fees_tree; - - // -// TODO #381: AIP#61 Remove the fees and use the new Spender Aggregates +// TODO #381: AIP#61 Use the new Spender Aggregate and Sum all balances for the new Accounting // pub(crate) fn merge_aggrs( accounting: &Accounting, aggregates: &[EventAggregate], - channel: &Channel, + // + // TODO: AIP#61 Use Campaign and if we should check the total sum of the Balances < campaign.budget + // + _channel: &Channel, ) -> Result { - let deposit = channel.deposit_amount.clone(); - - let last_event_aggregate = [accounting.last_event_aggregate] + let last_aggregate = [accounting.last_aggregate] .iter() .chain(aggregates.iter().map(|aggr| &aggr.created)) .max() - .unwrap_or(&accounting.last_event_aggregate) + .unwrap_or(&accounting.last_aggregate) .to_owned(); // Build an intermediary balances representation - let mut balances_before_fees = accounting.balances_before_fees.clone(); - - // Merge in all the aggrs - for aggr in aggregates { - balances_before_fees = - merge_payouts_into_balances(&balances_before_fees, aggr.events.values(), &deposit)? 
- } - - // apply fees - let balances = get_balances_after_fees_tree(&balances_before_fees, &channel)?; + // + // TODO: AIP#61 Sum all Spender Aggregates and use that for the new Accounting + // + let balances = BalancesMap::default(); let new_accounting = Accounting { - last_event_aggregate, - balances_before_fees, + last_aggregate, balances, }; Ok(new_accounting) } -fn merge_payouts_into_balances<'a, T: Iterator>( +// +// TODO: AIP#61 Check how this should apply for the new Campaigns +// +fn _merge_payouts_into_balances<'a, T: Iterator>( balances: &BalancesMap, events: T, deposit: &BigNum, @@ -62,9 +56,7 @@ fn merge_payouts_into_balances<'a, T: Iterator>( for (acc, payout) in all_payouts { let to_add = payout.min(&remaining); - let new_balance = new_balances - .entry(acc.to_owned()) - .or_insert_with(|| 0.into()); + let new_balance = new_balances.entry(*acc).or_insert_with(|| 0.into()); *new_balance += &to_add; @@ -80,14 +72,17 @@ fn merge_payouts_into_balances<'a, T: Iterator>( mod test { use chrono::Utc; - use primitives::util::tests::prep_db::{ - DUMMY_CHANNEL, DUMMY_VALIDATOR_FOLLOWER, DUMMY_VALIDATOR_LEADER, IDS, + use primitives::{ + util::tests::prep_db::{ + ADDRESSES, DUMMY_CHANNEL, DUMMY_VALIDATOR_FOLLOWER, DUMMY_VALIDATOR_LEADER, + }, + Address, Channel, ChannelSpec, ValidatorDesc, }; - use primitives::{Channel, ChannelSpec, ValidatorDesc, ValidatorId}; use super::*; #[test] + #[ignore] fn should_merge_event_aggrs_and_apply_fees() { // fees: 100 // deposit: 10 000 @@ -106,35 +101,24 @@ mod test { }; channel.spec.validators = (leader, follower).into(); - let balances_before_fees: BalancesMap = vec![ - (IDS["publisher"].clone(), 100.into()), - (IDS["publisher2"].clone(), 200.into()), - ] - .into_iter() - .collect(); - let acc = Accounting { - last_event_aggregate: Utc::now(), - balances_before_fees, + last_aggregate: Utc::now(), balances: BalancesMap::default(), }; - let new_accounting = merge_aggrs(&acc, &[gen_ev_aggr(5, &IDS["publisher"])], &channel) - .expect("Something went wrong"); + let new_accounting = + merge_aggrs(&acc, &[gen_ev_aggr(5, &ADDRESSES["publisher"])], &channel) + .expect("Something went wrong"); assert_eq!( - new_accounting.balances_before_fees[&IDS["publisher"]], - 150.into(), - "balance of recipient incremented accordingly" - ); - assert_eq!( - new_accounting.balances[&IDS["publisher"]], + new_accounting.balances[&ADDRESSES["publisher"]], 148.into(), - "balanceAfterFees is ok" + "balances is ok" ); } #[test] + #[ignore] fn should_never_allow_exceeding_the_deposit() { let leader = ValidatorDesc { fee: 50.into(), @@ -155,41 +139,22 @@ mod test { ..DUMMY_CHANNEL.clone() }; - let balances_before_fees: BalancesMap = vec![ - (IDS["publisher"].clone(), 100.into()), - (IDS["publisher2"].clone(), 200.into()), - ] - .into_iter() - .collect(); - let acc = Accounting { - last_event_aggregate: Utc::now(), - balances_before_fees, + last_aggregate: Utc::now(), balances: BalancesMap::default(), }; - let new_accounting = merge_aggrs(&acc, &[gen_ev_aggr(1_001, &IDS["publisher"])], &channel) - .expect("Something went wrong"); + let new_accounting = merge_aggrs( + &acc, + &[gen_ev_aggr(1_001, &ADDRESSES["publisher"])], + &channel, + ) + .expect("Something went wrong"); assert_eq!( - new_accounting.balances_before_fees[&IDS["publisher"]], - 9_800.into(), - "balance of recipient incremented accordingly" - ); - assert_eq!( - new_accounting.balances_before_fees[&IDS["publisher2"]], - 200.into(), - "balances of non-recipient remains the same" - ); - assert_eq!( - 
new_accounting.balances[&IDS["publisher"]], + new_accounting.balances[&ADDRESSES["publisher"]], 9_702.into(), - "balanceAfterFees is ok" - ); - assert_eq!( - &new_accounting.balances_before_fees.values().sum::(), - &channel.deposit_amount, - "sum(balancesBeforeFees) == depositAmount" + "balances is ok" ); assert_eq!( &new_accounting.balances.values().sum::(), @@ -198,14 +163,13 @@ mod test { ); } - fn gen_ev_aggr(count: u64, recipient: &ValidatorId) -> EventAggregate { + // + // TODO: AIP#61 Use new Spender Aggregate + // + fn gen_ev_aggr(count: u64, recipient: &Address) -> EventAggregate { let aggregate_events = AggregateEvents { - event_counts: Some( - vec![(recipient.clone(), count.into())] - .into_iter() - .collect(), - ), - event_payouts: vec![(recipient.clone(), (count * 10).into())] + event_counts: Some(vec![(*recipient, count.into())].into_iter().collect()), + event_payouts: vec![(*recipient, (count * 10).into())] .into_iter() .collect(), }; diff --git a/validator_worker/src/core/fees.rs b/validator_worker/src/core/fees.rs deleted file mode 100644 index fb7c793a4..000000000 --- a/validator_worker/src/core/fees.rs +++ /dev/null @@ -1,338 +0,0 @@ -use num::rational::Ratio; -use num_traits::CheckedSub; -use primitives::{BalancesMap, BigNum, Channel, DomainError, ValidatorDesc}; - -pub fn get_balances_after_fees_tree( - balances: &BalancesMap, - channel: &Channel, -) -> Result { - let deposit_amount = channel.deposit_amount.clone(); - - let total_distributed = balances.iter().map(|(_, balance)| balance).sum::(); - - let validators_iter = channel.spec.validators.iter(); - let total_validators_fee = validators_iter - .map(|validator| &validator.fee) - .sum::(); - - if total_validators_fee > deposit_amount { - return Err(DomainError::RuleViolation( - "total fees <= deposit: fee constraint violated".into(), - )); - } - - if total_distributed > deposit_amount { - return Err(DomainError::RuleViolation( - "distributed <= deposit: OUTPACE rule #4".into(), - )); - } - - let deposit_to_distribute = &deposit_amount - &total_validators_fee; - - let ratio = Ratio::new(deposit_to_distribute.clone(), deposit_amount.clone()); - let fee_ratio = Ratio::new(total_distributed.clone(), deposit_amount.clone()); - - let mut balances_after_fees = BalancesMap::default(); - let mut total = BigNum::from(0); - - for (key, value) in balances.iter() { - let adjusted_balance = value * ∶ - - total += &adjusted_balance; - balances_after_fees.insert(*key, adjusted_balance); - } - - let rounding_error = if deposit_amount == total_distributed { - deposit_to_distribute.checked_sub(&total).ok_or_else(|| { - DomainError::RuleViolation("rounding_err should never be negative".to_owned()) - })? 
- } else { - BigNum::from(0) - }; - - let balances_after_fees = distribute_fee( - balances_after_fees, - rounding_error, - fee_ratio, - channel.spec.validators.iter(), - ); - - Ok(balances_after_fees) -} - -fn distribute_fee<'a>( - mut balances: BalancesMap, - rounding_error: BigNum, - fee_ratio: Ratio, - validators: impl Iterator, -) -> BalancesMap { - for (index, validator) in validators.enumerate() { - let fee = &validator.fee * &fee_ratio; - - let fee_rounded = if index == 0 { - &fee + &rounding_error - } else { - fee - }; - - if fee_rounded > 0.into() { - let addr = validator.fee_addr.as_ref().unwrap_or(&validator.id); - let entry = balances.entry(addr.to_owned()).or_insert_with(|| 0.into()); - - *entry += &fee_rounded; - } - } - - balances -} - -#[cfg(test)] -mod test { - use super::*; - use primitives::util::tests::prep_db::{ - DUMMY_CHANNEL, DUMMY_VALIDATOR_FOLLOWER, DUMMY_VALIDATOR_LEADER, IDS, - }; - - mod applying_fee_returns_the_same_tree_with_zero_fees { - use super::*; - fn setup_balances_map(balances_map: &BalancesMap) -> BalancesMap { - let channel = get_zero_fee_channel(); - - get_balances_after_fees_tree(balances_map, &channel) - .expect("Calculation of fees failed") - } - - #[test] - fn case_1_three_values() { - let balances_map: BalancesMap = vec![ - (IDS["publisher"].clone(), 1001.into()), - (IDS["publisher2"].clone(), 3124.into()), - (IDS["tester"].clone(), 122.into()), - ] - .into_iter() - .collect(); - - assert_eq!(setup_balances_map(&balances_map), balances_map); - } - - #[test] - fn case_2_three_simple_values() { - let balances_map: BalancesMap = vec![ - (IDS["publisher"].clone(), 1.into()), - (IDS["publisher2"].clone(), 2.into()), - (IDS["tester"].clone(), 3.into()), - ] - .into_iter() - .collect(); - - assert_eq!(setup_balances_map(&balances_map), balances_map); - } - - #[test] - fn case_3_one_value() { - let balances_map = vec![(IDS["publisher"].clone(), BigNum::from(1))] - .into_iter() - .collect(); - - assert_eq!(setup_balances_map(&balances_map), balances_map); - } - - #[test] - fn case_4_two_values() { - let balances_map = vec![ - (IDS["publisher"].clone(), 1.into()), - (IDS["publisher2"].clone(), 99_999.into()), - ] - .into_iter() - .collect(); - - assert_eq!(setup_balances_map(&balances_map), balances_map); - } - - fn get_zero_fee_channel() -> Channel { - let leader = ValidatorDesc { - fee: 0.into(), - ..DUMMY_VALIDATOR_LEADER.clone() - }; - let follower = ValidatorDesc { - fee: 0.into(), - ..DUMMY_VALIDATOR_FOLLOWER.clone() - }; - - let mut spec = DUMMY_CHANNEL.spec.clone(); - spec.validators = (leader, follower).into(); - - Channel { - deposit_amount: 100_000.into(), - spec, - ..DUMMY_CHANNEL.clone() - } - } - } - - mod applying_fee_correctly { - use super::*; - - fn setup_balances_after_fee(balances_map: BalancesMap) -> BalancesMap { - let leader = ValidatorDesc { - fee: 50.into(), - ..DUMMY_VALIDATOR_LEADER.clone() - }; - let follower = ValidatorDesc { - fee: 50.into(), - ..DUMMY_VALIDATOR_FOLLOWER.clone() - }; - - let mut spec = DUMMY_CHANNEL.spec.clone(); - spec.validators = (leader, follower).into(); - - let channel = Channel { - deposit_amount: 10_000.into(), - spec, - ..DUMMY_CHANNEL.clone() - }; - - get_balances_after_fees_tree(&balances_map, &channel) - .expect("Calculation of fees failed") - } - - #[test] - fn case_1_partially_distributed() { - let balances_map = vec![ - (IDS["publisher"].clone(), 1_000.into()), - (IDS["publisher2"].clone(), 1_200.into()), - ] - .into_iter() - .collect(); - - let expected_balances: BalancesMap = vec![ - 
(IDS["publisher"].clone(), 990.into()), - (IDS["publisher2"].clone(), 1_188.into()), - (IDS["leader"].clone(), 11.into()), - (IDS["follower"].clone(), 11.into()), - ] - .into_iter() - .collect(); - - let balances_after_fee = setup_balances_after_fee(balances_map); - let actual_sum: BigNum = balances_after_fee.iter().map(|(_, v)| v).sum(); - - assert_eq!( - expected_balances - .iter() - .map(|(_, value)| value) - .sum::(), - actual_sum - ); - assert_eq!(expected_balances, balances_after_fee); - } - - #[test] - fn case_2_partially_distributed_with_validator_in_the_input_balances_map() { - let balances_map = vec![ - (IDS["publisher"].clone(), 100.into()), - (IDS["publisher2"].clone(), 2_000.into()), - (IDS["leader"].clone(), 200.into()), - ] - .into_iter() - .collect(); - - let expected_balances: BalancesMap = vec![ - (IDS["publisher"].clone(), 99.into()), - (IDS["publisher2"].clone(), 1_980.into()), - (IDS["leader"].clone(), 209.into()), - (IDS["follower"].clone(), 11.into()), - ] - .into_iter() - .collect(); - - let balances_after_fee = setup_balances_after_fee(balances_map); - let actual_sum: BigNum = balances_after_fee.iter().map(|(_, v)| v).sum(); - - assert_eq!( - expected_balances - .iter() - .map(|(_, value)| value) - .sum::(), - actual_sum - ); - assert_eq!(expected_balances, balances_after_fee); - } - - #[test] - /// also testing the rounding error correction - fn case_3_fully_distributed() { - let balances_map = vec![ - (IDS["publisher"].clone(), 105.into()), - (IDS["publisher2"].clone(), 195.into()), - (IDS["tester"].clone(), 700.into()), - (IDS["user"].clone(), 5_000.into()), - (IDS["creator"].clone(), 4_000.into()), - ] - .into_iter() - .collect(); - - let expected_balances: BalancesMap = vec![ - (IDS["publisher"].clone(), 103.into()), - (IDS["publisher2"].clone(), 193.into()), - (IDS["tester"].clone(), 693.into()), - (IDS["user"].clone(), 4_950.into()), - (IDS["creator"].clone(), 3_960.into()), - (IDS["leader"].clone(), 51.into()), - (IDS["follower"].clone(), 50.into()), - ] - .into_iter() - .collect(); - - let balances_after_fee = setup_balances_after_fee(balances_map); - let actual_sum: BigNum = balances_after_fee.iter().map(|(_, v)| v).sum(); - - assert_eq!( - expected_balances - .iter() - .map(|(_, value)| value) - .sum::(), - actual_sum - ); - assert_eq!(expected_balances, balances_after_fee); - } - } - - #[test] - fn errors_when_fees_larger_that_deposit() { - let balances_map = vec![ - (IDS["publisher"].clone(), 10.into()), - (IDS["publisher2"].clone(), 10.into()), - ] - .into_iter() - .collect(); - - let leader = ValidatorDesc { - fee: 600.into(), - ..DUMMY_VALIDATOR_LEADER.clone() - }; - let follower = ValidatorDesc { - fee: 600.into(), - ..DUMMY_VALIDATOR_FOLLOWER.clone() - }; - - let mut spec = DUMMY_CHANNEL.spec.clone(); - spec.validators = (leader, follower).into(); - - let channel = Channel { - deposit_amount: 1_000.into(), - spec, - ..DUMMY_CHANNEL.clone() - }; - - let domain_error = get_balances_after_fees_tree(&balances_map, &channel) - .expect_err("Should be DomainError not allow fees sum to exceed the deposit"); - - assert_eq!( - DomainError::RuleViolation( - "total fees <= deposit: fee constraint violated".to_string() - ), - domain_error - ); - } -} diff --git a/validator_worker/src/core/follower_rules.rs b/validator_worker/src/core/follower_rules.rs index 536685ceb..fef298202 100644 --- a/validator_worker/src/core/follower_rules.rs +++ b/validator_worker/src/core/follower_rules.rs @@ -35,7 +35,7 @@ pub fn get_health(channel: &Channel, our: &BalancesMap, 
approved: &BalancesMap) #[cfg(test)] mod test { - use primitives::util::tests::prep_db::{DUMMY_CHANNEL, IDS}; + use primitives::util::tests::prep_db::{ADDRESSES, DUMMY_CHANNEL}; use super::*; @@ -62,7 +62,7 @@ mod test { #[test] fn is_valid_transition_a_valid_transition() { - let next = vec![(IDS["publisher"].clone(), 100.into())] + let next = vec![(ADDRESSES["publisher"].clone(), 100.into())] .into_iter() .collect(); @@ -75,8 +75,8 @@ mod test { #[test] fn is_valid_transition_more_funds_than_dummy_channel() { let next = vec![ - (IDS["publisher"].clone(), 51.into()), - (IDS["publisher2"].clone(), 50.into()), + (ADDRESSES["publisher"].clone(), 51.into()), + (ADDRESSES["publisher2"].clone(), 50.into()), ] .into_iter() .collect(); @@ -89,11 +89,11 @@ mod test { #[test] fn is_valid_transition_single_value_is_lower() { - let prev = vec![(IDS["publisher"].clone(), 55.into())] + let prev = vec![(ADDRESSES["publisher"].clone(), 55.into())] .into_iter() .collect(); - let next = vec![(IDS["publisher"].clone(), 54.into())] + let next = vec![(ADDRESSES["publisher"].clone(), 54.into())] .into_iter() .collect(); @@ -105,13 +105,13 @@ mod test { #[test] fn is_valid_transition_a_value_is_lower_but_overall_sum_is_higher() { - let prev = vec![(IDS["publisher"].clone(), 55.into())] + let prev = vec![(ADDRESSES["publisher"].clone(), 55.into())] .into_iter() .collect(); let next = vec![ - (IDS["publisher"].clone(), 54.into()), - (IDS["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"].clone(), 54.into()), + (ADDRESSES["publisher2"].clone(), 3.into()), ] .into_iter() .collect(); @@ -125,13 +125,13 @@ mod test { #[test] fn is_valid_transition_overall_sum_is_lower() { let prev = vec![ - (IDS["publisher"].clone(), 54.into()), - (IDS["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"].clone(), 54.into()), + (ADDRESSES["publisher2"].clone(), 3.into()), ] .into_iter() .collect(); - let next = vec![(IDS["publisher"].clone(), 54.into())] + let next = vec![(ADDRESSES["publisher"].clone(), 54.into())] .into_iter() .collect(); @@ -144,13 +144,13 @@ mod test { #[test] fn is_valid_transition_overall_sum_is_the_same_but_we_remove_an_entry() { let prev = vec![ - (IDS["publisher"].clone(), 54.into()), - (IDS["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"].clone(), 54.into()), + (ADDRESSES["publisher2"].clone(), 3.into()), ] .into_iter() .collect(); - let next = vec![(IDS["publisher"].clone(), 57.into())] + let next = vec![(ADDRESSES["publisher"].clone(), 57.into())] .into_iter() .collect(); @@ -163,13 +163,13 @@ mod test { #[test] fn is_valid_transition_transition_to_a_state_with_a_negative_number() { let prev = vec![ - (IDS["publisher"].clone(), 54.into()), - (IDS["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"].clone(), 54.into()), + (ADDRESSES["publisher2"].clone(), 3.into()), ] .into_iter() .collect(); - let next = vec![(IDS["publisher"].clone(), 57.into())] + let next = vec![(ADDRESSES["publisher"].clone(), 57.into())] .into_iter() .collect(); @@ -182,7 +182,7 @@ mod test { #[test] fn get_health_the_approved_balance_tree_gte_our_accounting_is_healthy() { let channel = get_dummy_channel(50); - let our = vec![(IDS["publisher"].clone(), 50.into())] + let our = vec![(ADDRESSES["publisher"].clone(), 50.into())] .into_iter() .collect(); assert!(get_health(&channel, &our, &our) >= HEALTH_THRESHOLD); @@ -191,7 +191,7 @@ mod test { get_health( &channel, &our, - &vec![(IDS["publisher"].clone(), 60.into())] + &vec![(ADDRESSES["publisher"].clone(), 60.into())] .into_iter() .collect() ) >= 
HEALTH_THRESHOLD @@ -200,7 +200,7 @@ mod test { #[test] fn get_health_the_approved_balance_tree_is_positive_our_accounting_is_0_and_it_is_healthy() { - let approved = vec![(IDS["publisher"].clone(), 50.into())] + let approved = vec![(ADDRESSES["publisher"].clone(), 50.into())] .into_iter() .collect(); @@ -217,10 +217,10 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"].clone(), 80.into())] .into_iter() .collect(), - &vec![(IDS["publisher"].clone(), 79.into())] + &vec![(ADDRESSES["publisher"].clone(), 79.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD @@ -229,10 +229,10 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 2.into())] + &vec![(ADDRESSES["publisher"].clone(), 2.into())] .into_iter() .collect(), - &vec![(IDS["publisher"].clone(), 1.into())] + &vec![(ADDRESSES["publisher"].clone(), 1.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD @@ -246,10 +246,10 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"].clone(), 80.into())] .into_iter() .collect(), - &vec![(IDS["publisher"].clone(), 70.into())] + &vec![(ADDRESSES["publisher"].clone(), 70.into())] .into_iter() .collect() ) < HEALTH_THRESHOLD @@ -263,10 +263,10 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"].clone(), 80.into())] .into_iter() .collect(), - &vec![(IDS["publisher2"].clone(), 80.into())] + &vec![(ADDRESSES["publisher2"].clone(), 80.into())] .into_iter() .collect() ) < HEALTH_THRESHOLD @@ -275,12 +275,12 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"].clone(), 80.into())] .into_iter() .collect(), &vec![ - (IDS["publisher2"].clone(), 40.into()), - (IDS["publisher"].clone(), 40.into()) + (ADDRESSES["publisher2"].clone(), 40.into()), + (ADDRESSES["publisher"].clone(), 40.into()) ] .into_iter() .collect() @@ -290,12 +290,12 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"].clone(), 80.into())] .into_iter() .collect(), &vec![ - (IDS["publisher2"].clone(), 20.into()), - (IDS["publisher"].clone(), 60.into()) + (ADDRESSES["publisher2"].clone(), 20.into()), + (ADDRESSES["publisher"].clone(), 60.into()) ] .into_iter() .collect() @@ -305,12 +305,12 @@ mod test { assert!( get_health( &channel, - &vec![(IDS["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"].clone(), 80.into())] .into_iter() .collect(), &vec![ - (IDS["publisher2"].clone(), 2.into()), - (IDS["publisher"].clone(), 78.into()) + (ADDRESSES["publisher2"].clone(), 2.into()), + (ADDRESSES["publisher"].clone(), 78.into()) ] .into_iter() .collect() @@ -321,12 +321,12 @@ mod test { get_health( &channel, &vec![ - (IDS["publisher"].clone(), 100.into()), - (IDS["publisher2"].clone(), 1.into()) + (ADDRESSES["publisher"].clone(), 100.into()), + (ADDRESSES["publisher2"].clone(), 1.into()) ] .into_iter() .collect(), - &vec![(IDS["publisher"].clone(), 100.into())] + &vec![(ADDRESSES["publisher"].clone(), 100.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD diff --git a/validator_worker/src/lib.rs b/validator_worker/src/lib.rs index 0da27b58c..8297d0096 100644 --- a/validator_worker/src/lib.rs +++ b/validator_worker/src/lib.rs @@ -19,7 +19,6 @@ pub mod sentry_interface; pub mod core { pub mod events; - pub mod fees; pub mod follower_rules; } @@ -45,7 +44,7 @@ 
mod test { use adapter::DummyAdapter; use primitives::adapter::DummyAdapterOptions; use primitives::config::configuration; - use primitives::util::tests::prep_db::{AUTH, DUMMY_CHANNEL, IDS}; + use primitives::util::tests::prep_db::{ADDRESSES, AUTH, DUMMY_CHANNEL, IDS}; use primitives::{BalancesMap, Channel}; use slog::{o, Discard, Logger}; @@ -69,8 +68,8 @@ mod test { let iface = setup_iface(&channel); let balances: BalancesMap = vec![ - (IDS["publisher"].clone(), 1.into()), - (IDS["tester"].clone(), 2.into()), + (ADDRESSES["publisher"].clone(), 1.into()), + (ADDRESSES["tester"].clone(), 2.into()), ] .into_iter() .collect(); @@ -90,7 +89,7 @@ mod test { let iface = setup_iface(&channel); - let balances: BalancesMap = vec![(IDS["publisher"].clone(), 0.into())] + let balances: BalancesMap = vec![(ADDRESSES["publisher"].clone(), 0.into())] .into_iter() .collect(); diff --git a/validator_worker/src/producer.rs b/validator_worker/src/producer.rs index 81338ca83..a0c00a1e3 100644 --- a/validator_worker/src/producer.rs +++ b/validator_worker/src/producer.rs @@ -30,24 +30,26 @@ pub async fn tick( let accounting = match validator_msg_resp { Some(MessageTypes::Accounting(accounting)) => accounting, _ => Accounting { - last_event_aggregate: Utc.timestamp(0, 0), - balances_before_fees: Default::default(), + last_aggregate: Utc.timestamp(0, 0), balances: Default::default(), }, }; - + // // TODO #381: AIP#61 Merge all Spender Aggregates and create a new Accounting // let aggrs = iface - .get_event_aggregates(accounting.last_event_aggregate) + .get_event_aggregates(accounting.last_aggregate) .await?; if aggrs.events.is_empty() { return Ok(TickStatus::NoNewEventAggr(accounting.balances)); } + // + // TODO: AIP#61 Merge all Spender Aggregates when it's implemented + // let new_accounting = merge_aggrs(&accounting, &aggrs.events, &iface.channel)?; if new_accounting.balances.is_empty() { From bdb3e5a9ecc39bf955a4f4557ada5625461028c4 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 25 Mar 2021 16:34:55 +0200 Subject: [PATCH 35/49] primitives - remove unexisting `spender` module --- primitives/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 83a82ac1e..d205eb18c 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -35,7 +35,6 @@ pub mod ipfs; pub mod market; pub mod merkle_tree; pub mod sentry; -pub mod spender; pub mod supermarket; pub mod targeting; mod unified_num; From a752604c97e96746620a586c6c331d91a9c95e9b Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 29 Mar 2021 11:06:25 +0300 Subject: [PATCH 36/49] validator_worker - remove unnecessary `.clone()`s --- validator_worker/src/core/follower_rules.rs | 80 ++++++++++----------- validator_worker/src/lib.rs | 8 +-- 2 files changed, 44 insertions(+), 44 deletions(-) diff --git a/validator_worker/src/core/follower_rules.rs b/validator_worker/src/core/follower_rules.rs index fef298202..288cbdfc9 100644 --- a/validator_worker/src/core/follower_rules.rs +++ b/validator_worker/src/core/follower_rules.rs @@ -62,7 +62,7 @@ mod test { #[test] fn is_valid_transition_a_valid_transition() { - let next = vec![(ADDRESSES["publisher"].clone(), 100.into())] + let next = vec![(ADDRESSES["publisher"], 100.into())] .into_iter() .collect(); @@ -75,8 +75,8 @@ mod test { #[test] fn is_valid_transition_more_funds_than_dummy_channel() { let next = vec![ - (ADDRESSES["publisher"].clone(), 51.into()), - (ADDRESSES["publisher2"].clone(), 50.into()), + (ADDRESSES["publisher"], 51.into()), + 
(ADDRESSES["publisher2"], 50.into()), ] .into_iter() .collect(); @@ -89,11 +89,11 @@ mod test { #[test] fn is_valid_transition_single_value_is_lower() { - let prev = vec![(ADDRESSES["publisher"].clone(), 55.into())] + let prev = vec![(ADDRESSES["publisher"], 55.into())] .into_iter() .collect(); - let next = vec![(ADDRESSES["publisher"].clone(), 54.into())] + let next = vec![(ADDRESSES["publisher"], 54.into())] .into_iter() .collect(); @@ -105,13 +105,13 @@ mod test { #[test] fn is_valid_transition_a_value_is_lower_but_overall_sum_is_higher() { - let prev = vec![(ADDRESSES["publisher"].clone(), 55.into())] + let prev = vec![(ADDRESSES["publisher"], 55.into())] .into_iter() .collect(); let next = vec![ - (ADDRESSES["publisher"].clone(), 54.into()), - (ADDRESSES["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"], 54.into()), + (ADDRESSES["publisher2"], 3.into()), ] .into_iter() .collect(); @@ -125,13 +125,13 @@ mod test { #[test] fn is_valid_transition_overall_sum_is_lower() { let prev = vec![ - (ADDRESSES["publisher"].clone(), 54.into()), - (ADDRESSES["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"], 54.into()), + (ADDRESSES["publisher2"], 3.into()), ] .into_iter() .collect(); - let next = vec![(ADDRESSES["publisher"].clone(), 54.into())] + let next = vec![(ADDRESSES["publisher"], 54.into())] .into_iter() .collect(); @@ -144,13 +144,13 @@ mod test { #[test] fn is_valid_transition_overall_sum_is_the_same_but_we_remove_an_entry() { let prev = vec![ - (ADDRESSES["publisher"].clone(), 54.into()), - (ADDRESSES["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"], 54.into()), + (ADDRESSES["publisher2"], 3.into()), ] .into_iter() .collect(); - let next = vec![(ADDRESSES["publisher"].clone(), 57.into())] + let next = vec![(ADDRESSES["publisher"], 57.into())] .into_iter() .collect(); @@ -163,13 +163,13 @@ mod test { #[test] fn is_valid_transition_transition_to_a_state_with_a_negative_number() { let prev = vec![ - (ADDRESSES["publisher"].clone(), 54.into()), - (ADDRESSES["publisher2"].clone(), 3.into()), + (ADDRESSES["publisher"], 54.into()), + (ADDRESSES["publisher2"], 3.into()), ] .into_iter() .collect(); - let next = vec![(ADDRESSES["publisher"].clone(), 57.into())] + let next = vec![(ADDRESSES["publisher"], 57.into())] .into_iter() .collect(); @@ -182,7 +182,7 @@ mod test { #[test] fn get_health_the_approved_balance_tree_gte_our_accounting_is_healthy() { let channel = get_dummy_channel(50); - let our = vec![(ADDRESSES["publisher"].clone(), 50.into())] + let our = vec![(ADDRESSES["publisher"], 50.into())] .into_iter() .collect(); assert!(get_health(&channel, &our, &our) >= HEALTH_THRESHOLD); @@ -191,7 +191,7 @@ mod test { get_health( &channel, &our, - &vec![(ADDRESSES["publisher"].clone(), 60.into())] + &vec![(ADDRESSES["publisher"], 60.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD @@ -200,7 +200,7 @@ mod test { #[test] fn get_health_the_approved_balance_tree_is_positive_our_accounting_is_0_and_it_is_healthy() { - let approved = vec![(ADDRESSES["publisher"].clone(), 50.into())] + let approved = vec![(ADDRESSES["publisher"], 50.into())] .into_iter() .collect(); @@ -217,10 +217,10 @@ mod test { assert!( get_health( &channel, - &vec![(ADDRESSES["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"], 80.into())] .into_iter() .collect(), - &vec![(ADDRESSES["publisher"].clone(), 79.into())] + &vec![(ADDRESSES["publisher"], 79.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD @@ -229,10 +229,10 @@ mod test { assert!( get_health( &channel, - 
&vec![(ADDRESSES["publisher"].clone(), 2.into())] + &vec![(ADDRESSES["publisher"], 2.into())] .into_iter() .collect(), - &vec![(ADDRESSES["publisher"].clone(), 1.into())] + &vec![(ADDRESSES["publisher"], 1.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD @@ -246,10 +246,10 @@ mod test { assert!( get_health( &channel, - &vec![(ADDRESSES["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"], 80.into())] .into_iter() .collect(), - &vec![(ADDRESSES["publisher"].clone(), 70.into())] + &vec![(ADDRESSES["publisher"], 70.into())] .into_iter() .collect() ) < HEALTH_THRESHOLD @@ -263,10 +263,10 @@ mod test { assert!( get_health( &channel, - &vec![(ADDRESSES["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"], 80.into())] .into_iter() .collect(), - &vec![(ADDRESSES["publisher2"].clone(), 80.into())] + &vec![(ADDRESSES["publisher2"], 80.into())] .into_iter() .collect() ) < HEALTH_THRESHOLD @@ -275,12 +275,12 @@ mod test { assert!( get_health( &channel, - &vec![(ADDRESSES["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"], 80.into())] .into_iter() .collect(), &vec![ - (ADDRESSES["publisher2"].clone(), 40.into()), - (ADDRESSES["publisher"].clone(), 40.into()) + (ADDRESSES["publisher2"], 40.into()), + (ADDRESSES["publisher"], 40.into()) ] .into_iter() .collect() @@ -290,12 +290,12 @@ mod test { assert!( get_health( &channel, - &vec![(ADDRESSES["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"], 80.into())] .into_iter() .collect(), &vec![ - (ADDRESSES["publisher2"].clone(), 20.into()), - (ADDRESSES["publisher"].clone(), 60.into()) + (ADDRESSES["publisher2"], 20.into()), + (ADDRESSES["publisher"], 60.into()) ] .into_iter() .collect() @@ -305,12 +305,12 @@ mod test { assert!( get_health( &channel, - &vec![(ADDRESSES["publisher"].clone(), 80.into())] + &vec![(ADDRESSES["publisher"], 80.into())] .into_iter() .collect(), &vec![ - (ADDRESSES["publisher2"].clone(), 2.into()), - (ADDRESSES["publisher"].clone(), 78.into()) + (ADDRESSES["publisher2"], 2.into()), + (ADDRESSES["publisher"], 78.into()) ] .into_iter() .collect() @@ -321,12 +321,12 @@ mod test { get_health( &channel, &vec![ - (ADDRESSES["publisher"].clone(), 100.into()), - (ADDRESSES["publisher2"].clone(), 1.into()) + (ADDRESSES["publisher"], 100.into()), + (ADDRESSES["publisher2"], 1.into()) ] .into_iter() .collect(), - &vec![(ADDRESSES["publisher"].clone(), 100.into())] + &vec![(ADDRESSES["publisher"], 100.into())] .into_iter() .collect() ) >= HEALTH_THRESHOLD diff --git a/validator_worker/src/lib.rs b/validator_worker/src/lib.rs index 8297d0096..0096be1d2 100644 --- a/validator_worker/src/lib.rs +++ b/validator_worker/src/lib.rs @@ -50,7 +50,7 @@ mod test { fn setup_iface(channel: &Channel) -> SentryApi { let adapter_options = DummyAdapterOptions { - dummy_identity: IDS["leader"].clone(), + dummy_identity: IDS["leader"], dummy_auth: IDS.clone(), dummy_auth_tokens: AUTH.clone(), }; @@ -68,8 +68,8 @@ mod test { let iface = setup_iface(&channel); let balances: BalancesMap = vec![ - (ADDRESSES["publisher"].clone(), 1.into()), - (ADDRESSES["tester"].clone(), 2.into()), + (ADDRESSES["publisher"], 1.into()), + (ADDRESSES["tester"], 2.into()), ] .into_iter() .collect(); @@ -89,7 +89,7 @@ mod test { let iface = setup_iface(&channel); - let balances: BalancesMap = vec![(ADDRESSES["publisher"].clone(), 0.into())] + let balances: BalancesMap = vec![(ADDRESSES["publisher"], 0.into())] .into_iter() .collect(); From 42bd6ecfed3d6b3251aa2f8f9f742055ae53fe4d Mon Sep 17 00:00:00 2001 From: Lachezar Lechev 
Date: Mon, 5 Apr 2021 13:29:02 +0300 Subject: [PATCH 37/49] primitives - Campaign::find_validator - primitives - sentry - Earner - remove unused struct - primtivies - validator - ValidatorDesc docs of fields --- primitives/src/campaign.rs | 8 ++++++++ primitives/src/sentry.rs | 7 ------- primitives/src/validator.rs | 5 ++++- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 75ed6c9fc..13a5be8c2 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -184,6 +184,14 @@ pub struct Campaign { } impl Campaign { + pub fn find_validator(&self, validator: ValidatorId) -> Option<&'_ ValidatorDesc> { + match (self.leader(), self.follower()) { + (Some(leader), _) if leader.id == validator => Some(leader), + (_, Some(follower)) if follower.id == validator => Some(follower), + _ => None, + } + } + /// Matches the Channel.leader to the Campaign.spec.leader /// If they match it returns `Some`, otherwise, it returns `None` pub fn leader(&self) -> Option<&'_ ValidatorDesc> { diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index ca999aa84..f3ae035ba 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -172,13 +172,6 @@ impl fmt::Display for Event { } } -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] -pub struct Earner { - #[serde(rename = "publisher")] - pub address: String, - pub promilles: u64, -} - #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct EventAggregate { diff --git a/primitives/src/validator.rs b/primitives/src/validator.rs index d644c18dc..d8e22d6bd 100644 --- a/primitives/src/validator.rs +++ b/primitives/src/validator.rs @@ -106,10 +106,13 @@ impl TryFrom for ValidatorId { #[serde(rename_all = "camelCase")] pub struct ValidatorDesc { pub id: ValidatorId, + /// The validator fee in pro milles (per 1000) + pub fee: UnifiedNum, #[serde(default, skip_serializing_if = "Option::is_none")] + /// The address which will receive the fees pub fee_addr: Option
, + /// The url of the Validator on which is the API pub url: String, - pub fee: UnifiedNum, } /// Validator Message Types From 5ae2cd5b28873f600343f3510a457d6694833a68 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 5 Apr 2021 13:29:46 +0300 Subject: [PATCH 38/49] primitives - UnifiedNum - add const fn from_u64 --- primitives/src/unified_num.rs | 52 ++++++++++++++++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index e3fc07392..5cc4f26c9 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -46,7 +46,11 @@ impl UnifiedNum { Self(self.0.div_floor(&other.0)) } - pub fn to_u64(&self) -> u64 { + pub const fn from_u64(value: u64) -> Self { + Self(value) + } + + pub const fn to_u64(&self) -> u64 { self.0 } @@ -334,4 +338,50 @@ mod test { "It should not make any adjustments to the precision" ); } + + #[test] + fn div_and_floor_fee_calculation() { + // 1.00007777 + let one_sevens = UnifiedNum::from(100_007_777_u64); + let pro_milles = UnifiedNum::from(1_000); + let division = one_sevens.div(&pro_milles); + let fee = UnifiedNum::from(7); + + assert_eq!(UnifiedNum::from(100_007), division); + // e.g. fee of 7 pro milles + assert_eq!(UnifiedNum::from(700_049), division * &fee); + } + + #[test] + fn mul_first_and_div_fee_calculation() { + // 1.00007777 + let one_sevens = UnifiedNum::from(100_007_777_u64); + let pro_milles = UnifiedNum::from(1_000); + let fee = UnifiedNum::from(7); + let multiply = one_sevens.mul(&fee); + + // assert_eq!(UnifiedNum::from(100_007), multiply); + // e.g. fee of 7 pro milles + assert_eq!(UnifiedNum::from(700_049), multiply.div(&pro_milles)); + } + + #[test] + fn div_rem_fee_calculation() { + // 1.00007777 + let one_sevens = UnifiedNum::from(100_007_777_u64); + let pro_milles = UnifiedNum::from(1_000); + let fee = UnifiedNum::from(7); + + let (quotient, remainder) = one_sevens.div_rem(&pro_milles); + let main_fee = quotient * &fee; + assert_eq!(&UnifiedNum::from(700_049), &main_fee); + + let expected_remainder = UnifiedNum::from(777); + assert_eq!(&expected_remainder, &remainder); + + let expected_fee_of_remainder = UnifiedNum::from(5_439).div_floor(&pro_milles); + assert_eq!(expected_fee_of_remainder, (&expected_remainder * &fee).div_floor(&pro_milles)); + + assert_eq!(UnifiedNum::from(700_054), main_fee + expected_fee_of_remainder); + } } From 53ddd4ad817c3e3ccae0f45918afe3e6a16acfff Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 5 Apr 2021 13:31:19 +0300 Subject: [PATCH 39/49] primitives & sentry - init spender modules --- primitives/src/lib.rs | 1 + primitives/src/spender.rs | 27 +++++++++++++++++++ sentry/src/lib.rs | 1 + sentry/src/spender.rs | 56 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 85 insertions(+) create mode 100644 primitives/src/spender.rs create mode 100644 sentry/src/spender.rs diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index d205eb18c..11e4c216a 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -36,6 +36,7 @@ pub mod market; pub mod merkle_tree; pub mod sentry; pub mod supermarket; +pub mod spender; pub mod targeting; mod unified_num; pub mod validator; diff --git a/primitives/src/spender.rs b/primitives/src/spender.rs new file mode 100644 index 000000000..527756a70 --- /dev/null +++ b/primitives/src/spender.rs @@ -0,0 +1,27 @@ +use crate::{Address, BalancesMap, UnifiedNum, channel_v5::Channel}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Deposit { + pub total: UnifiedNum, + pub still_on_create2: UnifiedNum, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Spendable { + pub spender: Address, + pub channel: Channel, + #[serde(flatten)] + pub deposit: Deposit, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Aggregate { + pub spender: Address, + pub channel: Channel, + pub balances: BalancesMap, + pub created: DateTime, +} diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index d3a4a5e29..7bf1378cd 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -44,6 +44,7 @@ pub mod db; pub mod event_aggregator; pub mod event_reducer; pub mod payout; +pub mod spender; lazy_static! { static ref CHANNEL_GET_BY_ID: Regex = diff --git a/sentry/src/spender.rs b/sentry/src/spender.rs new file mode 100644 index 000000000..051166915 --- /dev/null +++ b/sentry/src/spender.rs @@ -0,0 +1,56 @@ +use std::time::Duration; + +use dashmap::DashMap; +use primitives::{spender::Aggregate, ChannelId}; + +#[derive(Debug)] +/// +pub struct Aggregator { + /// In-memory aggregates waiting to be saved to the underlying persistence storage (database) + aggregates: DashMap, + /// Specifies how often the Aggregate should be stored in the underlying persistence storage (database) + throttle: Duration, +} + +impl Aggregator { + /// Stores the aggregate to the database + pub fn store_aggregates() { + todo!("Store aggregate to DB") + } + /// Records new spending triggered by a Payout event + pub async fn record() { + todo!("Record a new payout") + } +} + +pub mod fee { + pub const PRO_MILLE: UnifiedNum = UnifiedNum::from_u64(1000); + + use primitives::{Address, Campaign, DomainError, UnifiedNum, ValidatorId}; + + /// Calculates the fee for a specified validator + /// This function will return None if the provided validator is not part of the Campaign / Channel + /// In the case of overflow when calculating the payout, an error will be returned + pub fn calculate_fees( + (_earner, payout): (Address, UnifiedNum), + campaign: &Campaign, + for_validator: ValidatorId, + ) -> Result, DomainError> { + let payout = match campaign.find_validator(for_validator) { + Some(validator) => { + // should never overflow + let fee_payout = payout + .checked_mul(validator.fee) + .ok_or(DomainError::InvalidArgument( + "payout calculation overflow".to_string(), + ))? 
+ .div_floor(&PRO_MILLE); + + Some(fee_payout) + } + None => None, + }; + + Ok(payout) + } +} From 75e63e82c71ccaa225f93e1dc1b9ee51a9df17f6 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 6 Apr 2021 14:10:21 +0300 Subject: [PATCH 40/49] fix errors & rustfmt --- primitives/src/campaign.rs | 2 +- primitives/src/lib.rs | 2 +- primitives/src/spender.rs | 2 +- primitives/src/unified_num.rs | 16 +++++++++++----- sentry/src/routes/channel.rs | 7 +++++++ sentry/src/spender.rs | 4 ++-- 6 files changed, 23 insertions(+), 10 deletions(-) diff --git a/primitives/src/campaign.rs b/primitives/src/campaign.rs index 13a5be8c2..c4691ed7c 100644 --- a/primitives/src/campaign.rs +++ b/primitives/src/campaign.rs @@ -1,6 +1,6 @@ use crate::{ channel_v5::Channel, targeting::Rules, AdUnit, Address, EventSubmission, UnifiedNum, - ValidatorDesc, + ValidatorDesc, ValidatorId, }; use chrono::{ diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 11e4c216a..83a82ac1e 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -35,8 +35,8 @@ pub mod ipfs; pub mod market; pub mod merkle_tree; pub mod sentry; -pub mod supermarket; pub mod spender; +pub mod supermarket; pub mod targeting; mod unified_num; pub mod validator; diff --git a/primitives/src/spender.rs b/primitives/src/spender.rs index 527756a70..2af887a0e 100644 --- a/primitives/src/spender.rs +++ b/primitives/src/spender.rs @@ -1,4 +1,4 @@ -use crate::{Address, BalancesMap, UnifiedNum, channel_v5::Channel}; +use crate::{channel_v5::Channel, Address, BalancesMap, UnifiedNum}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; diff --git a/primitives/src/unified_num.rs b/primitives/src/unified_num.rs index 5cc4f26c9..499007924 100644 --- a/primitives/src/unified_num.rs +++ b/primitives/src/unified_num.rs @@ -346,7 +346,7 @@ mod test { let pro_milles = UnifiedNum::from(1_000); let division = one_sevens.div(&pro_milles); let fee = UnifiedNum::from(7); - + assert_eq!(UnifiedNum::from(100_007), division); // e.g. fee of 7 pro milles assert_eq!(UnifiedNum::from(700_049), division * &fee); @@ -359,12 +359,12 @@ mod test { let pro_milles = UnifiedNum::from(1_000); let fee = UnifiedNum::from(7); let multiply = one_sevens.mul(&fee); - + // assert_eq!(UnifiedNum::from(100_007), multiply); // e.g. 
fee of 7 pro milles assert_eq!(UnifiedNum::from(700_049), multiply.div(&pro_milles)); } - + #[test] fn div_rem_fee_calculation() { // 1.00007777 @@ -380,8 +380,14 @@ mod test { assert_eq!(&expected_remainder, &remainder); let expected_fee_of_remainder = UnifiedNum::from(5_439).div_floor(&pro_milles); - assert_eq!(expected_fee_of_remainder, (&expected_remainder * &fee).div_floor(&pro_milles)); + assert_eq!( + expected_fee_of_remainder, + (&expected_remainder * &fee).div_floor(&pro_milles) + ); - assert_eq!(UnifiedNum::from(700_054), main_fee + expected_fee_of_remainder); + assert_eq!( + UnifiedNum::from(700_054), + main_fee + expected_fee_of_remainder + ); } } diff --git a/sentry/src/routes/channel.rs b/sentry/src/routes/channel.rs index cfaeff9e4..f57cd9a80 100644 --- a/sentry/src/routes/channel.rs +++ b/sentry/src/routes/channel.rs @@ -205,6 +205,13 @@ pub async fn insert_events( // TODO #381: AIP#61 Spender Aggregator should be called // + // handle events - check access + // handle events - Update targeting rules + // calculate payout + // distribute fees + // handle spending - Spender Aggregate + // handle events - aggregate Events and put into analytics + app.event_aggregator .record(app, &channel_id, session, auth, events) .await?; diff --git a/sentry/src/spender.rs b/sentry/src/spender.rs index 051166915..b1dab92ce 100644 --- a/sentry/src/spender.rs +++ b/sentry/src/spender.rs @@ -24,7 +24,7 @@ impl Aggregator { } pub mod fee { - pub const PRO_MILLE: UnifiedNum = UnifiedNum::from_u64(1000); + pub const PRO_MILLE: UnifiedNum = UnifiedNum::from_u64(1_000); use primitives::{Address, Campaign, DomainError, UnifiedNum, ValidatorId}; @@ -40,7 +40,7 @@ pub mod fee { Some(validator) => { // should never overflow let fee_payout = payout - .checked_mul(validator.fee) + .checked_mul(&validator.fee) .ok_or(DomainError::InvalidArgument( "payout calculation overflow".to_string(), ))? 
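The per-mille fee arithmetic introduced in the two patches above (a `PRO_MILLE` constant of 1_000, `checked_mul` followed by `div_floor`) is easy to get subtly wrong, so here is a minimal, self-contained sketch of the same order of operations using plain u64 values. The helper name `fee_per_mille` and the standalone `main` are illustrative assumptions only and are not part of the patch series; since `UnifiedNum` wraps a `u64`, the integer math carries over directly.

    // Illustrative sketch only (not part of the patches): per-mille fee math
    // in the multiply-then-floor-divide order used by `calculate_fees` above.
    fn fee_per_mille(payout: u64, fee: u64) -> Option<u64> {
        // `checked_mul` guards against overflow, mirroring the patch;
        // integer division by 1_000 floors the result, like `div_floor`.
        payout.checked_mul(fee).map(|product| product / 1_000)
    }

    fn main() {
        // 1.00007777 in the raw representation, at a 7 pro-mille fee:
        // 100_007_777 * 7 = 700_054_439, floored division by 1_000 = 700_054,
        // which matches the `div_rem_fee_calculation` test above.
        assert_eq!(fee_per_mille(100_007_777, 7), Some(700_054));

        // Dividing before multiplying drops the remainder's share of the fee:
        // (100_007_777 / 1_000) * 7 = 700_049, as in `div_and_floor_fee_calculation`.
        assert_eq!((100_007_777_u64 / 1_000) * 7, 700_049);
    }

Multiplying first and flooring last keeps the remainder's contribution to the fee, which is why the two orderings in the tests above differ by 5 units.
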
From 55616d4d6e4304d7c2672a1a25cd3d3206c07b23 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 9 Apr 2021 16:42:33 +0300 Subject: [PATCH 41/49] Issue #381 & #382 - sentry migrations: - Change the channel table to campaign - add a spendable table - update other tables & add todos for AIP#61 --- .../20190806011140_initial-tables/up.sql | 82 ++++++++++++------- 1 file changed, 51 insertions(+), 31 deletions(-) diff --git a/sentry/migrations/20190806011140_initial-tables/up.sql b/sentry/migrations/20190806011140_initial-tables/up.sql index 3ac05a96e..237dea717 100644 --- a/sentry/migrations/20190806011140_initial-tables/up.sql +++ b/sentry/migrations/20190806011140_initial-tables/up.sql @@ -1,48 +1,68 @@ -CREATE TABLE channels -( - id VARCHAR(66) NOT NULL, - creator VARCHAR(255) NOT NULL, - deposit_asset VARCHAR(42) NOT NULL, - deposit_amount VARCHAR(255) NOT NULL, - valid_until TIMESTAMP(2) WITH TIME ZONE NOT NULL, - spec JSONB NOT NULL, - targeting_rules JSONB DEFAULT '[]' NOT NULL, - exhausted BOOLEAN[2] DEFAULT '{}' NOT NULL, - +CREATE TABLE campaigns ( + id varchar(34) NOT NULL, + channel_id varchar(66) NOT NULL, + channel jsonb NOT NULL, + creator varchar(42) NOT NULL, + budget numeric(20, 8) NOT NULL, + validators jsonb NOT NULL, + title varchar(255) NULL, + pricing_bounds jsonb DEFAULT '{}' NULL, + event_submission jsonb DEFAULT '{}' NULL, + ad_units jsonb DEFAULT '[]' NOT NULL, + targeting_rules jsonb DEFAULT '[]' NOT NULL, + created timestamp(2) with time zone NOT NULL, + active_from timestamp(2) with time zone NULL, + active_to timestamp(2) with time zone NOT NULL, PRIMARY KEY (id) ); -CREATE INDEX idx_channel_valid_until ON channels (valid_until); -CREATE INDEX idx_channels_spec_created ON channels ((spec ->> 'created')); +CREATE INDEX idx_campaign_active_to ON campaign (active_to); + +CREATE INDEX idx_campaign_created ON campaign (created); -CREATE TABLE validator_messages -( - channel_id VARCHAR(66) NOT NULL REFERENCES channels (id) ON DELETE RESTRICT, - "from" VARCHAR(255) NOT NULL, - msg JSONB NOT NULL, - received TIMESTAMP(2) WITH TIME ZONE NOT NULL +CREATE TABLE spendable ( + spender varchar(42) NOT NULL, + channel_id varchar(66) NOT NULL, + channel jsonb NOT NULL, + total numeric(20, 8) NOT NULL, + still_on_create2 numeric(20, 8), + PRIMARY KEY (spender, channel_id) +); + +CREATE TABLE validator_messages ( + -- TODO: Should the validator message be reference to channel_id or campaign_id? 
+ channel_id varchar(66) NOT NULL, -- REFERENCES channels (id) ON DELETE RESTRICT, + "from" varchar(255) NOT NULL, + msg jsonb NOT NULL, + received timestamp(2) with time zone NOT NULL ); CREATE INDEX idx_validator_messages_received ON validator_messages (received); + CREATE INDEX idx_validator_messages_msg_type ON validator_messages ((msg ->> 'type')); + CREATE INDEX idx_validator_messages_msg_state_root ON validator_messages ((msg ->> 'stateRoot')); -CREATE TABLE event_aggregates -( - channel_id VARCHAR(66) NOT NULL REFERENCES channels (id) ON DELETE RESTRICT, - created TIMESTAMP(2) WITH TIME ZONE NOT NULL DEFAULT NOW(), - event_type VARCHAR(255) NOT NULL, - earner VARCHAR(255), - count VARCHAR NOT NULL, - payout VARCHAR NOT NULL +-- TODO: AIP#61 Alter Event Aggregates +CREATE TABLE event_aggregates ( + channel_id varchar(66) NOT NULL REFERENCES channels (id) ON DELETE RESTRICT, + created timestamp(2) with time zone NOT NULL DEFAULT NOW(), + event_type varchar(255) NOT NULL, + earner varchar(42), + -- todo: AIP#61 check the count and payout + count varchar NOT NULL, + payout varchar NOT NULL ); CREATE INDEX idx_event_aggregates_created ON event_aggregates (created); + CREATE INDEX idx_event_aggregates_channel ON event_aggregates (channel_id); + CREATE INDEX idx_event_aggregates_event_type ON event_aggregates (event_type); -CREATE AGGREGATE jsonb_object_agg(jsonb) ( - SFUNC = 'jsonb_concat', - STYPE = jsonb, - INITCOND = '{}' +CREATE AGGREGATE jsonb_object_agg (jsonb) ( + SFUNC = 'jsonb_concat', + STYPE = jsonb, + INITCOND = '{}' ); + From da56536106b9eee1a58eb38d94eee84db5511d8f Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 14 Apr 2021 11:50:30 +0300 Subject: [PATCH 42/49] sentry - Cargo - add deps for postgres tests --- Cargo.lock | 106 ++++++++++++---------------------------------- sentry/Cargo.toml | 9 +++- 2 files changed, 33 insertions(+), 82 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 10fd16fb0..7c6ca445b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -391,7 +391,7 @@ dependencies = [ "async-trait", "bb8", "tokio 1.0.2", - "tokio-postgres 0.7.0", + "tokio-postgres", ] [[package]] @@ -832,16 +832,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "crypto-mac" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58bcd97a54c7ca5ce2f6eb16f6bede5b0ab5f0055fedc17d2f0b4466e21671ca" -dependencies = [ - "generic-array 0.14.4", - "subtle", -] - [[package]] name = "crypto-mac" version = "0.10.0" @@ -970,7 +960,7 @@ dependencies = [ "log", "serde", "tokio 1.0.2", - "tokio-postgres 0.7.0", + "tokio-postgres", ] [[package]] @@ -1649,16 +1639,6 @@ dependencies = [ "digest 0.7.6", ] -[[package]] -name = "hmac" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deae6d9dbb35ec2c502d62b8f7b1c000a0822c3b0794ba36b3149c0a1c840dff" -dependencies = [ - "crypto-mac 0.9.1", - "digest 0.9.0", -] - [[package]] name = "hmac" version = "0.10.1" @@ -2097,16 +2077,18 @@ dependencies = [ [[package]] name = "migrant_lib" -version = "0.30.0" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdd763657dd793f0cfb3f5e5e1eb754bf56c38b5bc964696ebc4573cdfd0c240" +checksum = "ff83d86ebbdeaf04b7461dfe75afe7be21a38937321f7f0c9069b36e2f35eadf" dependencies = [ "chrono", "error-chain", "lazy_static", "log", + "native-tls", "percent-encoding", "postgres", + "postgres-native-tls", "regex", "serde", "serde_derive", @@ -2748,34 +2730,29 @@ dependencies = [ [[package]] 
name = "postgres" -version = "0.17.5" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d864cf6c2eabf1323afe4145ff273aad1898e4f2a3bcb30347715df8624a07" +checksum = "0f853fba627ed1f21392d329eeb03caf90dce57a65dfbd24274f4c39452ed3bb" dependencies = [ - "bytes 0.5.6", + "bytes 1.0.1", "fallible-iterator", "futures 0.3.12", "log", - "tokio 0.2.24", - "tokio-postgres 0.5.5", + "tokio 1.0.2", + "tokio-postgres", ] [[package]] -name = "postgres-protocol" -version = "0.5.3" +name = "postgres-native-tls" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4888a0e36637ab38d76cace88c1476937d617ad015f07f6b669cec11beacc019" +checksum = "2d442770e2b1e244bb5eb03b31c79b65bb2568f413b899eaba850fa945a65954" dependencies = [ - "base64 0.13.0", - "byteorder 1.4.2", - "bytes 0.5.6", - "fallible-iterator", - "hmac 0.9.0", - "md5", - "memchr", - "rand 0.7.3", - "sha2 0.9.2", - "stringprep", + "futures 0.3.12", + "native-tls", + "tokio 1.0.2", + "tokio-native-tls", + "tokio-postgres", ] [[package]] @@ -2796,17 +2773,6 @@ dependencies = [ "stringprep", ] -[[package]] -name = "postgres-types" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfc08a7d94a80665de4a83942fa8db2fdeaf2f123fc0535e384dc4fff251efae" -dependencies = [ - "bytes 0.5.6", - "fallible-iterator", - "postgres-protocol 0.5.3", -] - [[package]] name = "postgres-types" version = "0.2.0" @@ -2816,7 +2782,7 @@ dependencies = [ "bytes 1.0.1", "chrono", "fallible-iterator", - "postgres-protocol 0.6.0", + "postgres-protocol", "serde", "serde_json", ] @@ -2886,7 +2852,7 @@ dependencies = [ "num-traits", "once_cell", "parse-display", - "postgres-types 0.2.0", + "postgres-types", "pretty_assertions", "rand 0.8.2", "serde", @@ -2901,7 +2867,7 @@ dependencies = [ "time 0.1.43", "tiny-keccak 2.0.2", "tokio 1.0.2", - "tokio-postgres 0.7.0", + "tokio-postgres", "toml", "url", "uuid", @@ -3552,6 +3518,7 @@ dependencies = [ "clap", "dashmap", "deadpool", + "deadpool-postgres", "futures 0.3.12", "hex", "hyper 0.14.2", @@ -3567,6 +3534,7 @@ dependencies = [ "slog", "thiserror", "tokio 1.0.2", + "tokio-postgres", ] [[package]] @@ -4215,28 +4183,6 @@ dependencies = [ "tokio 1.0.2", ] -[[package]] -name = "tokio-postgres" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55a2482c9fe4dd481723cf5c0616f34afc710e55dcda0944e12e7b3316117892" -dependencies = [ - "async-trait", - "byteorder 1.4.2", - "bytes 0.5.6", - "fallible-iterator", - "futures 0.3.12", - "log", - "parking_lot 0.11.1", - "percent-encoding", - "phf", - "pin-project-lite 0.1.11", - "postgres-protocol 0.5.3", - "postgres-types 0.1.3", - "tokio 0.2.24", - "tokio-util 0.3.1", -] - [[package]] name = "tokio-postgres" version = "0.7.0" @@ -4253,8 +4199,8 @@ dependencies = [ "percent-encoding", "phf", "pin-project-lite 0.2.4", - "postgres-protocol 0.6.0", - "postgres-types 0.2.0", + "postgres-protocol", + "postgres-types", "socket2", "tokio 1.0.2", "tokio-util 0.6.1", diff --git a/sentry/Cargo.toml b/sentry/Cargo.toml index 89bba8f24..968daf394 100644 --- a/sentry/Cargo.toml +++ b/sentry/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "sentry" version = "0.1.0" -authors = ["Omidiora Samuel "] +authors = ["Lachezar Lechev ", "Omidiora Samuel "] edition = "2018" [dependencies] @@ -27,7 +27,7 @@ bb8 = "0.7" bb8-postgres = { version = "0.7", features = ["with-chrono-0_4", "with-serde_json-1"] } # Migrations -migrant_lib = { version = 
"^0.30", features = ["d-postgres"] } +migrant_lib = { version = "^0.32", features = ["d-postgres"] } # Logger slog = { version = "^2.2.3", features = ["max_level_trace"] } # Serde @@ -37,5 +37,10 @@ serde_urlencoded = "^0.7" # Other lazy_static = "1.4.0" thiserror = "^1.0" +tokio-postgres = { version = "0.7.0", features = ["with-chrono-0_4", "with-serde_json-1"] } + +[dev-dependencies] +# todo: Replace `bb8` once we update all places. deadpool = "0.7.0" +deadpool-postgres = "0.7.0" once_cell = "1.5.2" From 8764182ee7c7151c1d8535d138639915cdecae22 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 14 Apr 2021 11:50:59 +0300 Subject: [PATCH 43/49] sentry - migrations - initial-tables - fix types & typos --- .../migrations/20190806011140_initial-tables/up.sql | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sentry/migrations/20190806011140_initial-tables/up.sql b/sentry/migrations/20190806011140_initial-tables/up.sql index 237dea717..7db83d194 100644 --- a/sentry/migrations/20190806011140_initial-tables/up.sql +++ b/sentry/migrations/20190806011140_initial-tables/up.sql @@ -3,7 +3,7 @@ CREATE TABLE campaigns ( channel_id varchar(66) NOT NULL, channel jsonb NOT NULL, creator varchar(42) NOT NULL, - budget numeric(20, 8) NOT NULL, + budget bigint NOT NULL, validators jsonb NOT NULL, title varchar(255) NULL, pricing_bounds jsonb DEFAULT '{}' NULL, @@ -16,16 +16,16 @@ CREATE TABLE campaigns ( PRIMARY KEY (id) ); -CREATE INDEX idx_campaign_active_to ON campaign (active_to); +CREATE INDEX idx_campaign_active_to ON campaigns (active_to); -CREATE INDEX idx_campaign_created ON campaign (created); +CREATE INDEX idx_campaign_created ON campaigns (created); CREATE TABLE spendable ( spender varchar(42) NOT NULL, channel_id varchar(66) NOT NULL, channel jsonb NOT NULL, - total numeric(20, 8) NOT NULL, - still_on_create2 numeric(20, 8), + total bigint NOT NULL, + still_on_create2 bigint NOT NULL, PRIMARY KEY (spender, channel_id) ); @@ -45,7 +45,7 @@ CREATE INDEX idx_validator_messages_msg_state_root ON validator_messages ((msg - -- TODO: AIP#61 Alter Event Aggregates CREATE TABLE event_aggregates ( - channel_id varchar(66) NOT NULL REFERENCES channels (id) ON DELETE RESTRICT, + channel_id varchar(66) NOT NULL, -- REFERENCES channels (id) ON DELETE RESTRICT, created timestamp(2) with time zone NOT NULL DEFAULT NOW(), event_type varchar(255) NOT NULL, earner varchar(42), From 99de2f5efc2ac47551ab52298ce02e551c8f062f Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 14 Apr 2021 12:21:48 +0300 Subject: [PATCH 44/49] sentry - db - postgres_pool - Test pool with migration setup fn --- sentry/src/db.rs | 164 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 162 insertions(+), 2 deletions(-) diff --git a/sentry/src/db.rs b/sentry/src/db.rs index 612509fdd..57e8e0d97 100644 --- a/sentry/src/db.rs +++ b/sentry/src/db.rs @@ -44,8 +44,8 @@ pub async fn postgres_connection() -> Result; + + /// we must have a duplication of the migration because of how migrant is handling migratoins + /// we need to separately setup test migrations + pub static MIGRATIONS: &[&str] = &["20190806011140_initial-tables"]; + + pub static TESTS_POOL: Lazy = Lazy::new(|| { + use deadpool_postgres::{ManagerConfig, RecyclingMethod}; + use tokio_postgres::tls::NoTls; + let mut config = bb8_postgres::tokio_postgres::Config::new(); + + config + .user(POSTGRES_USER.as_str()) + .password(POSTGRES_PASSWORD.as_str()) + .host(POSTGRES_HOST.as_str()) + .port(*POSTGRES_PORT); + if let Some(db) = 
POSTGRES_DB.as_ref() { + config.dbname(db); + } + + let deadpool_manager = deadpool_postgres::Manager::from_config( + config, + NoTls, + ManagerConfig { + recycling_method: RecyclingMethod::Verified, + }, + ); + + Pool::new( + Manager { + postgres_manager: Arc::new(deadpool_manager), + index: AtomicUsize::new(0), + }, + 15, + ) + }); + + /// A Scheme is used to isolate test runs from each other + /// we need to know the name of the schema we've created. + /// This will allow us the drop the schema when we are recycling the connection + pub struct Schema { + /// The schema name that will be created by the pool `CREATE SCHEMA` + /// This schema will be set as the connection `search_path` (`SET SCHEMA` for short) + pub name: String, + pub client: ClientWrapper, + } + + impl Deref for Schema { + type Target = tokio_postgres::Client; + fn deref(&self) -> &tokio_postgres::Client { + &self.client + } + } + + impl DerefMut for Schema { + fn deref_mut(&mut self) -> &mut tokio_postgres::Client { + &mut self.client + } + } + + struct Manager + Send + Sync> { + postgres_manager: Arc>, + index: AtomicUsize, + } + + #[async_trait] + impl ManagerTrait for Manager + where + T: MakeTlsConnect + Clone + Sync + Send + 'static, + T::Stream: Sync + Send, + T::TlsConnect: Sync + Send, + >::Future: Send, + { + async fn create(&self) -> Result { + let client = self.postgres_manager.create().await?; + + let conn_index = self.index.fetch_add(1, Ordering::SeqCst); + let schema_name = format!("test_{}", conn_index); + + // 1. Drop the schema if it exists - if a test failed before, the schema wouldn't have been removed + // 2. Create schema + // 3. Set the `search_path` (SET SCHEMA) - this way we don't have to define schema on queries or table creation + + let queries = format!( + "DROP SCHEMA IF EXISTS {0} CASCADE; CREATE SCHEMA {0}; SET SESSION SCHEMA '{0}';", + schema_name + ); + + let result = client.simple_query(&queries).await?; + + assert_eq!(3, result.len()); + assert!(matches!(result[0], SimpleQueryMessage::CommandComplete(..))); + assert!(matches!(result[1], SimpleQueryMessage::CommandComplete(..))); + assert!(matches!(result[2], SimpleQueryMessage::CommandComplete(..))); + + Ok(Schema { + name: schema_name, + client, + }) + } + + async fn recycle(&self, schema: &mut Schema) -> RecycleResult { + let queries = format!("DROP SCHEMA {0} CASCADE;", schema.name); + let result = schema.simple_query(&queries).await?; + assert_eq!(2, result.len()); + assert!(matches!(result[0], SimpleQueryMessage::CommandComplete(..))); + assert!(matches!(result[1], SimpleQueryMessage::CommandComplete(..))); + + self.postgres_manager.recycle(&mut schema.client).await + } + } + + pub async fn setup_test_migrations(client: &Client) -> Result<(), Error> { + let full_query: String = MIGRATIONS + .iter() + .map(|migration| { + use std::{ + fs::File, + io::{BufReader, Read}, + }; + let file = File::open(format!("migrations/{}/up.sql", migration)) + .expect("File migration couldn't be opened"); + let mut buf_reader = BufReader::new(file); + let mut contents = String::new(); + + buf_reader + .read_to_string(&mut contents) + .expect("File migration couldn't be read"); + contents + }) + .collect(); + + client.batch_execute(&full_query).await + } +} + #[cfg(test)] pub mod redis_pool { From b3578ea78a72f57661dc72335643380d9fb07656 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 14 Apr 2021 12:22:39 +0300 Subject: [PATCH 45/49] sentry - Makefile - setup postgres DB for running tests --- sentry/Makefile.toml | 5 ++++- 1 file changed, 4 
diff --git a/sentry/Makefile.toml b/sentry/Makefile.toml
index 47ada5ba4..bcddf41f4 100644
--- a/sentry/Makefile.toml
+++ b/sentry/Makefile.toml
@@ -12,8 +12,11 @@ dependencies = [
     "services-down",
 ]

+[tasks.test]
+env = { "POSTGRES_DB" = "sentry_leader" }
+
 [tasks.services-up]
-script = "docker-compose -f ../docker-compose.ci.yml up -d redis-leader"
+script = "docker-compose -f ../docker-compose.ci.yml up -d redis-leader postgres-leader"

 [tasks.services-down]
 script = "docker-compose -f ../docker-compose.ci.yml down"

From 9c39059ab975c72abae5a2352d40294e589d7072 Mon Sep 17 00:00:00 2001
From: Lachezar Lechev
Date: Wed, 14 Apr 2021 12:27:15 +0300
Subject: [PATCH 46/49] sentry - db - spendable - insert & fetch Spendable

---
 primitives/src/sentry.rs   |   6 ++-
 sentry/src/db.rs           |   1 +
 sentry/src/db/spendable.rs | 105 +++++++++++++++++++++++++++++++++++++
 3 files changed, 111 insertions(+), 1 deletion(-)
 create mode 100644 sentry/src/db/spendable.rs

diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs
index 472ab5705..f3ae035ba 100644
--- a/primitives/src/sentry.rs
+++ b/primitives/src/sentry.rs
@@ -1,4 +1,8 @@
-use crate::{Address, BalancesMap, BigNum, Channel, ChannelId, IPFS, ValidatorId, targeting::Rules, validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}};
+use crate::{
+    targeting::Rules,
+    validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType},
+    Address, BalancesMap, BigNum, Channel, ChannelId, ValidatorId, IPFS,
+};
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use std::{collections::HashMap, fmt, hash::Hash};
diff --git a/sentry/src/db.rs b/sentry/src/db.rs
index 57e8e0d97..e0a7dc628 100644
--- a/sentry/src/db.rs
+++ b/sentry/src/db.rs
@@ -8,6 +8,7 @@ use lazy_static::lazy_static;
 pub mod analytics;
 mod channel;
 pub mod event_aggregate;
+pub mod spendable;
 mod validator_message;

 pub use self::channel::*;
diff --git a/sentry/src/db/spendable.rs b/sentry/src/db/spendable.rs
new file mode 100644
index 000000000..ca04f600b
--- /dev/null
+++ b/sentry/src/db/spendable.rs
@@ -0,0 +1,105 @@
+use std::convert::TryFrom;
+
+use primitives::{channel_v5::Channel, Address, ChannelId, UnifiedNum};
+use tokio_postgres::{Client, Error, Row};
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Spendable {
+    spender: Address,
+    channel: Channel,
+    total: UnifiedNum,
+    still_on_create2: UnifiedNum,
+}
+
+/// ```text
+/// INSERT INTO spendable (spender, channel_id, channel, total, still_on_create2)
+/// values ('0xce07CbB7e054514D590a0262C93070D838bFBA2e', '0x061d5e2a67d0a9a10f1c732bca12a676d83f79663a396f7d87b3e30b9b411088', '{}', 10.00000000, 2.00000000);
+/// ```
+pub async fn insert_spendable(client: &Client, spendable: &Spendable) -> Result<bool, Error> {
+    let stmt = client.prepare("INSERT INTO spendable (spender, channel_id, channel, total, still_on_create2) values ($1, $2, $3, $4, $5)").await?;
+
+    let row = client
+        .execute(
+            &stmt,
+            &[
+                &spendable.spender,
+                &spendable.channel.id(),
+                &spendable.channel,
+                &spendable.total,
+                &spendable.still_on_create2,
+            ],
+        )
+        .await?;
+
+    let is_inserted = row == 1;
+    Ok(is_inserted)
+}
+
+/// ```text
+/// SELECT spender, channel_id, channel, total, still_on_create2 FROM spendable
+/// WHERE spender = $1 AND channel_id = $2
+/// ```
+pub async fn fetch_spendable(
+    client: &Client,
+    spender: &Address,
+    channel_id: &ChannelId,
+) -> Result<Spendable, Error> {
+    let statement = client.prepare("SELECT spender, channel_id, channel, total, still_on_create2 FROM spendable WHERE spender = $1 AND channel_id = $2").await?;
+
+    let row = client.query_one(&statement, &[spender, channel_id]).await?;
+
+    Spendable::try_from(row)
+}
+
+impl TryFrom<Row> for Spendable {
+    type Error = Error;
+
+    fn try_from(row: Row) -> Result<Self, Self::Error> {
+        Ok(Spendable {
+            spender: row.try_get("spender")?,
+            channel: row.try_get("channel")?,
+            total: row.try_get("total")?,
+            still_on_create2: row.try_get("still_on_create2")?,
+        })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use primitives::{
+        util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN},
+        UnifiedNum,
+    };
+
+    use crate::db::postgres_pool::{setup_test_migrations, TESTS_POOL};
+
+    use super::*;
+
+    #[tokio::test]
+    async fn it_inserts_and_fetches_spendable() {
+        let test_client = TESTS_POOL.get().await.unwrap();
+
+        setup_test_migrations(&test_client)
+            .await
+            .expect("Migrations should succeed");
+
+        let spendable = Spendable {
+            spender: ADDRESSES["user"],
+            channel: DUMMY_CAMPAIGN.channel.clone(),
+            total: UnifiedNum::from(100_000_000),
+            still_on_create2: UnifiedNum::from(500_000),
+        };
+        let is_inserted = insert_spendable(&test_client, &spendable)
+            .await
+            .expect("Should succeed");
+
+        assert!(is_inserted);
+
+        let fetched_spendable =
+            fetch_spendable(&test_client, &spendable.spender, &spendable.channel.id())
+                .await
+                .expect("Should fetch successfully");
+
+        assert_eq!(spendable, fetched_spendable);
+    }
+}

From 2afcf32f5841158d2114d151daa7e3eecef58e5f Mon Sep 17 00:00:00 2001
From: Lachezar Lechev
Date: Wed, 14 Apr 2021 15:45:45 +0300
Subject: [PATCH 47/49] primitives - Makefile - start a postgres container on local test flow

---
 primitives/Makefile.toml | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)
 create mode 100644 primitives/Makefile.toml

diff --git a/primitives/Makefile.toml b/primitives/Makefile.toml
new file mode 100644
index 000000000..5ca86bf57
--- /dev/null
+++ b/primitives/Makefile.toml
@@ -0,0 +1,22 @@
+[tasks.dev-test-flow]
+description = "Development testing flow will first format the code, and then run cargo build and test"
+category = "Development"
+dependencies = [
+    "format-flow",
+    "format-toml-conditioned-flow",
+    "pre-build",
+    "build",
+    "post-build",
+    "services-up",
+    "test-flow",
+    "services-down",
+]
+
+[tasks.test]
+env = { "POSTGRES_DB" = "sentry_leader" }
+
+[tasks.services-up]
+script = "docker-compose -f ../docker-compose.ci.yml up -d postgres-leader"
+
+[tasks.services-down]
+script = "docker-compose -f ../docker-compose.ci.yml down"

From 94fced61f27d9cf3e1a74dd1fea5a0eb2fc7454c Mon Sep 17 00:00:00 2001
From: Lachezar Lechev
Date: Wed, 14 Apr 2021 16:09:21 +0300
Subject: [PATCH 48/49] sentry - spendable - use primitives::spender::Spendable

---
 primitives/src/spender.rs  | 26 ++++++++++++++++++++++--
 sentry/src/db/spendable.rs | 36 +++++++++---------------------------
 2 files changed, 33 insertions(+), 29 deletions(-)

diff --git a/primitives/src/spender.rs b/primitives/src/spender.rs
index 2af887a0e..2cf32584e 100644
--- a/primitives/src/spender.rs
+++ b/primitives/src/spender.rs
@@ -2,14 +2,14 @@ use crate::{channel_v5::Channel, Address, BalancesMap, UnifiedNum};
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};

-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 pub struct Deposit {
     pub total: UnifiedNum,
     pub still_on_create2: UnifiedNum,
 }

-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct Spendable {
     pub spender: Address,
     pub channel: Channel,
@@ -25,3 +25,25 @@ pub struct Aggregate {
     pub balances: BalancesMap,
     pub created: DateTime<Utc>,
 }
+#[cfg(feature = "postgres")]
+mod postgres {
+    use std::convert::TryFrom;
+    use tokio_postgres::{Error, Row};
+
+    use super::*;
+
+    impl TryFrom<Row> for Spendable {
+        type Error = Error;
+
+        fn try_from(row: Row) -> Result<Self, Self::Error> {
+            Ok(Spendable {
+                spender: row.try_get("spender")?,
+                channel: row.try_get("channel")?,
+                deposit: Deposit {
+                    total: row.try_get("total")?,
+                    still_on_create2: row.try_get("still_on_create2")?,
+                },
+            })
+        }
+    }
+}
diff --git a/sentry/src/db/spendable.rs b/sentry/src/db/spendable.rs
index ca04f600b..dd3b83f45 100644
--- a/sentry/src/db/spendable.rs
+++ b/sentry/src/db/spendable.rs
@@ -1,15 +1,7 @@
 use std::convert::TryFrom;

-use primitives::{channel_v5::Channel, Address, ChannelId, UnifiedNum};
-use tokio_postgres::{Client, Error, Row};
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct Spendable {
-    spender: Address,
-    channel: Channel,
-    total: UnifiedNum,
-    still_on_create2: UnifiedNum,
-}
+use primitives::{spender::Spendable, Address, ChannelId};
+use tokio_postgres::{Client, Error};

 /// ```text
 /// INSERT INTO spendable (spender, channel_id, channel, total, still_on_create2)
@@ -25,8 +17,8 @@ pub async fn insert_spendable(client: &Client, spendable: &Spendable) -> Result<bool, Error> {
                 &spendable.spender,
                 &spendable.channel.id(),
                 &spendable.channel,
-                &spendable.total,
-                &spendable.still_on_create2,
+                &spendable.deposit.total,
+                &spendable.deposit.still_on_create2,
             ],
         )
         .await?;
@@ -51,22 +43,10 @@ pub async fn fetch_spendable(
     Spendable::try_from(row)
 }

-impl TryFrom<Row> for Spendable {
-    type Error = Error;
-
-    fn try_from(row: Row) -> Result<Self, Self::Error> {
-        Ok(Spendable {
-            spender: row.try_get("spender")?,
-            channel: row.try_get("channel")?,
-            total: row.try_get("total")?,
-            still_on_create2: row.try_get("still_on_create2")?,
-        })
-    }
-}
-
 #[cfg(test)]
 mod test {
     use primitives::{
+        spender::{Deposit, Spendable},
         util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN},
         UnifiedNum,
     };
@@ -86,8 +66,10 @@ mod test {
         let spendable = Spendable {
             spender: ADDRESSES["user"],
             channel: DUMMY_CAMPAIGN.channel.clone(),
-            total: UnifiedNum::from(100_000_000),
-            still_on_create2: UnifiedNum::from(500_000),
+            deposit: Deposit {
+                total: UnifiedNum::from(100_000_000),
+                still_on_create2: UnifiedNum::from(500_000),
+            },
         };
         let is_inserted = insert_spendable(&test_client, &spendable)
             .await

From 910971a39e7e1b84bb9f9d36cdeb6f2313dd702c Mon Sep 17 00:00:00 2001
From: Lachezar Lechev
Date: Thu, 22 Apr 2021 16:08:50 +0300
Subject: [PATCH 49/49] sentry - migrations - initial-tables - add campaign.creator index

---
 sentry/migrations/20190806011140_initial-tables/up.sql | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/sentry/migrations/20190806011140_initial-tables/up.sql b/sentry/migrations/20190806011140_initial-tables/up.sql
index 7db83d194..cb55715a8 100644
--- a/sentry/migrations/20190806011140_initial-tables/up.sql
+++ b/sentry/migrations/20190806011140_initial-tables/up.sql
@@ -18,6 +18,8 @@ CREATE TABLE campaigns (

 CREATE INDEX idx_campaign_active_to ON campaigns (active_to);

+CREATE INDEX idx_campaign_creator ON campaigns (creator);
+
 CREATE INDEX idx_campaign_created ON campaigns (created);

 CREATE TABLE spendable (