From d91948e0e97d477dbce1f6d8972a982ce4b81550 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Tue, 23 Nov 2021 18:13:51 +0200 Subject: [PATCH 01/24] initial progress --- primitives/src/analytics.rs | 32 ++++- primitives/src/sentry.rs | 2 +- sentry/src/lib.rs | 61 ++------- sentry/src/routes/analytics.rs | 222 +++++++++++++++++++-------------- 4 files changed, 163 insertions(+), 154 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index 283d3be18..7a7539b24 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,6 +1,7 @@ -use crate::{ChannelId, DomainError}; +use crate::{ChannelId, DomainError, sentry::DateHour, CampaignId, IPFS, Address}; use parse_display::Display; use serde::{Deserialize, Serialize}; +use chrono::{Utc, DateTime, serde::ts_seconds}; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; @@ -66,6 +67,7 @@ pub mod postgres { } } +// TODO: Clean up query defaults/start/end #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct AnalyticsQuery { @@ -77,7 +79,29 @@ pub struct AnalyticsQuery { pub metric: String, #[serde(default = "default_timeframe")] pub timeframe: String, - pub segment_by_channel: Option, + pub segment_by: Option, + #[serde(with = "ts_seconds", default = "Utc::now", rename = "activeTo")] + pub start: DateTime, + #[serde(with = "ts_seconds", default = "Utc::now", rename = "activeTo")] + pub end: DateTime, + #[serde(default = "default_timezone")] + pub timezone: String, + #[serde(flatten)] + pub keys: AnalyticsQueryKeys, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AnalyticsQueryKeys { + pub campaign_id: Option, + pub ad_unit: Option, + pub ad_slot: Option, + pub ad_slot_type: Option, + pub advertiser: Option
<Address>, +    pub publisher: Option<Address>
, + pub hostname: Option, + pub country: Option, + pub os_name: Option, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Display, Hash, Eq)] @@ -217,6 +241,10 @@ fn default_timeframe() -> String { "hour".into() } +fn default_timezone() -> String { + "UTC".into() +} + #[cfg(test)] mod test { use super::*; diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 7238d7fe7..81545b030 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -6,7 +6,7 @@ use crate::{ Address, Balances, BigNum, CampaignId, Channel, ChannelId, UnifiedNum, ValidatorId, IPFS, }; use bytes::BytesMut; -use chrono::{Date, DateTime, Datelike, TimeZone, Timelike, Utc}; +use chrono::{DateTime, Datelike, TimeZone, Timelike, Utc}; use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; use serde::{Deserialize, Serialize}; use std::{collections::HashMap, error::Error, fmt, hash::Hash}; diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index d74cb7226..ec9c54cae 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -7,7 +7,7 @@ use hyper::{Body, Method, Request, Response, StatusCode}; use middleware::{ auth::{AuthRequired, Authenticate}, campaign::{CalledByCreator, CampaignLoad}, - channel::{ChannelLoad, GetChannelId}, + channel::ChannelLoad, cors::{cors, Cors}, Chain, Middleware, }; @@ -247,18 +247,13 @@ async fn analytics_router( app: &Application, ) -> Result, ResponseError> { use routes::analytics::{ - advanced_analytics, advertiser_analytics, analytics, publisher_analytics, + admin_analytics, advertiser_analytics, analytics, publisher_analytics, }; let (route, method) = (req.uri().path(), req.method()); match (route, method) { - ("/analytics", &Method::GET) => analytics(req, app).await, - ("/analytics/advanced", &Method::GET) => { - let req = AuthRequired.call(req, app).await?; - - advanced_analytics(req, app).await - } + ("/analytics", &Method::GET) => analytics(req, app, Some(vec!["country".to_string(), "ad_slot_type".to_string()]), None).await, ("/analytics/for-advertiser", &Method::GET) => { let req = AuthRequired.call(req, app).await?; advertiser_analytics(req, app).await @@ -268,52 +263,10 @@ async fn analytics_router( publisher_analytics(req, app).await } - (route, &Method::GET) => { - if let Some(caps) = ANALYTICS_BY_CHANNEL_ID.captures(route) { - let param = RouteParams(vec![caps - .get(1) - .map_or("".to_string(), |m| m.as_str().to_string())]); - req.extensions_mut().insert(param); - - // apply middlewares - req = Chain::new() - .chain(ChannelLoad) - .chain(GetChannelId) - .apply(req, app) - .await?; - - analytics(req, app).await - } else if let Some(caps) = ADVERTISER_ANALYTICS_BY_CHANNEL_ID.captures(route) { - let param = RouteParams(vec![caps - .get(1) - .map_or("".to_string(), |m| m.as_str().to_string())]); - req.extensions_mut().insert(param); - - // apply middlewares - req = Chain::new() - .chain(AuthRequired) - .chain(GetChannelId) - .apply(req, app) - .await?; - - advertiser_analytics(req, app).await - } else if let Some(caps) = PUBLISHER_ANALYTICS_BY_CHANNEL_ID.captures(route) { - let param = RouteParams(vec![caps - .get(1) - .map_or("".to_string(), |m| m.as_str().to_string())]); - req.extensions_mut().insert(param); - - // apply middlewares - req = Chain::new() - .chain(AuthRequired) - .chain(GetChannelId) - .apply(req, app) - .await?; - - publisher_analytics(req, app).await - } else { - Err(ResponseError::NotFound) - } + ("/analytics/for-admin", &Method::GET) => { + let req = AuthRequired.call(req, app).await?; + + 
admin_analytics(req, app).await } _ => Err(ResponseError::NotFound), } diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index cfb7a72a6..fc6810a2e 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -6,73 +6,99 @@ use hyper::{Body, Request, Response}; use primitives::{ adapter::Adapter, analytics::{AnalyticsQuery, AnalyticsResponse}, + sentry::DateHour, ChannelId, }; use redis::aio::MultiplexedConnection; use slog::{error, Logger}; -pub async fn publisher_analytics( +pub const ALLOWED_KEYS: [&'static str; 9] = [ + "campaignId", + "adUnit", + "adSlot", + "adSlotType", + "advertiser", + "publisher", + "hostname", + "country", + "osName" +]; + +// TODO: Convert timeframe to enum and add this as an enum method +pub fn get_period_in_hours(timeframe: String) -> u64 { + let hour = 1; + let day = 24 * hour; + let year = 365 * day; + if timeframe == "day" { + day + } else if timeframe == "week" { + 7 * day + } else if timeframe == "month" { + year / 12 + } else if timeframe == "year" { + year + } else { + day + } +} + +pub fn get_time_period_query_clause(start: Option, end: Option>, period: u64, event_type: String, metric: String, timezone: String) -> String { + // start && !Number.isNaN(new Date(start)) ? new Date(start) : new Date(Date.now() - period), + let start = match start { + Some(start) => { + DateHour::from() + }, + None => DateHour::now() - + } +} + +pub async fn analytics( req: Request, app: &Application, + allowed_keys: Option>, + auth_as_key: Option, ) -> Result, ResponseError> { - let auth = req - .extensions() - .get::() - .ok_or(ResponseError::Unauthorized)? - .clone(); + let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - let analytics_type = AnalyticsType::Publisher { auth }; + let period = get_period_in_hours(query.timeframe); - process_analytics(req, app, analytics_type) - .await - .map(success_response) + + todo!(); } -pub async fn analytics( + +// TODO: remove each of these +pub async fn publisher_analytics( req: Request, app: &Application, ) -> Result, ResponseError> { - let request_uri = req.uri().to_string(); - let redis = app.redis.clone(); - - match redis::cmd("GET") - .arg(&request_uri) - .query_async::<_, Option>(&mut redis.clone()) - .await - { - Ok(Some(response)) => Ok(success_response(response)), - _ => { - // checks if /:id route param is present - let cache_timeframe = match req.extensions().get::() { - Some(_) => 600, - None => 300, - }; - let response = process_analytics(req, app, AnalyticsType::Global).await?; - cache( - &redis.clone(), - request_uri, - &response, - cache_timeframe, - &app.logger, - ) - .await; - Ok(success_response(response)) - } - } + todo!(); + // let auth = req + // .extensions() + // .get::() + // .ok_or(ResponseError::Unauthorized)? 
+ // .clone(); + + // let analytics_type = AnalyticsType::Publisher { auth }; + + // process_analytics(req, app, analytics_type) + // .await + // .map(success_response) } pub async fn advertiser_analytics( req: Request, app: &Application, ) -> Result, ResponseError> { - let sess = req.extensions().get::(); - let analytics_type = AnalyticsType::Advertiser { - auth: sess.ok_or(ResponseError::Unauthorized)?.to_owned(), - }; - - process_analytics(req, app, analytics_type) - .await - .map(success_response) + todo!(); + // let sess = req.extensions().get::(); + // let analytics_type = AnalyticsType::Advertiser { + // auth: sess.ok_or(ResponseError::Unauthorized)?.to_owned(), + // }; + + // process_analytics(req, app, analytics_type) + // .await + // .map(success_response) } pub async fn process_analytics( @@ -80,67 +106,69 @@ pub async fn process_analytics( app: &Application, analytics_type: AnalyticsType, ) -> Result { - let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - query - .is_valid() - .map_err(|e| ResponseError::BadRequest(e.to_string()))?; + todo!(); + // let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; + // query + // .is_valid() + // .map_err(|e| ResponseError::BadRequest(e.to_string()))?; - let channel_id = req.extensions().get::(); + // let channel_id = req.extensions().get::(); - let segment_channel = query.segment_by_channel.is_some(); + // let segment_channel = query.segment_by_channel.is_some(); - let limit = query.limit; + // let limit = query.limit; - let aggr = get_analytics( - query, - &app.pool, - analytics_type, - segment_channel, - channel_id, - ) - .await?; + // let aggr = get_analytics( + // query, + // &app.pool, + // analytics_type, + // segment_channel, + // channel_id, + // ) + // .await?; - let response = AnalyticsResponse { aggr, limit }; + // let response = AnalyticsResponse { aggr, limit }; - serde_json::to_string(&response) - .map_err(|_| ResponseError::BadRequest("error occurred; try again later".to_string())) + // serde_json::to_string(&response) + // .map_err(|_| ResponseError::BadRequest("error occurred; try again later".to_string())) } -pub async fn advanced_analytics( +pub async fn admin_analytics( req: Request, app: &Application, ) -> Result, ResponseError> { - let auth = req.extensions().get::().expect("auth is required"); - let advertiser_channels = advertiser_channel_ids(&app.pool, &auth.uid).await?; - - let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - - let response = get_advanced_reports( - &app.redis, - &query.event_type, - &auth.uid, - &advertiser_channels, - ) - .await - .map_err(|_| ResponseError::BadRequest("error occurred; try again later".to_string()))?; - - Ok(success_response(serde_json::to_string(&response)?)) + todo!(); + // let auth = req.extensions().get::().expect("auth is required"); + // let advertiser_channels = advertiser_channel_ids(&app.pool, &auth.uid).await?; + + // let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; + + // let response = get_advanced_reports( + // &app.redis, + // &query.event_type, + // &auth.uid, + // &advertiser_channels, + // ) + // .await + // .map_err(|_| ResponseError::BadRequest("error occurred; try again later".to_string()))?; + + // Ok(success_response(serde_json::to_string(&response)?)) } -async fn cache( - redis: &MultiplexedConnection, - key: String, - value: &str, - timeframe: i32, - logger: &Logger, -) { - if let Err(err) = redis::cmd("SETEX") - .arg(&key) - .arg(timeframe) - .arg(value) - 
.query_async::<_, ()>(&mut redis.clone()) - .await - { - error!(&logger, "Server error: {}", err; "module" => "analytics-cache"); - } -} +// async fn cache( +// redis: &MultiplexedConnection, +// key: String, +// value: &str, +// timeframe: i32, +// logger: &Logger, +// ) { +// if let Err(err) = redis::cmd("SETEX") +// .arg(&key) +// .arg(timeframe) +// .arg(value) +// .query_async::<_, ()>(&mut redis.clone()) +// .await +// { +// error!(&logger, "Server error: {}", err; "module" => "analytics-cache"); +// } +// } From cbc4cd62cbf557b42682d100f3ce6e8771717272 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Thu, 25 Nov 2021 22:34:34 +0200 Subject: [PATCH 02/24] initial implementation for analytics v5 routes finished --- docs/config/dev.toml | 1 + docs/config/ganache.toml | 2 +- docs/config/prod.toml | 2 +- primitives/src/analytics.rs | 160 ++++++++++++++--------- primitives/src/config.rs | 1 + primitives/src/sentry.rs | 28 +++- sentry/src/db/analytics.rs | 232 +++++---------------------------- sentry/src/lib.rs | 41 +++--- sentry/src/middleware/auth.rs | 24 ++++ sentry/src/routes/analytics.rs | 226 +++++++++++++------------------- 10 files changed, 290 insertions(+), 427 deletions(-) diff --git a/docs/config/dev.toml b/docs/config/dev.toml index 732afb488..93ad2c134 100644 --- a/docs/config/dev.toml +++ b/docs/config/dev.toml @@ -37,6 +37,7 @@ ethereum_adapter_relayer = 'https://goerli-relayer.adex.network' creators_whitelist = [] validators_whitelist = [] +admins = ['0xce07CbB7e054514D590a0262C93070D838bFBA2e'] [[token_address_whitelist]] # DAI diff --git a/docs/config/ganache.toml b/docs/config/ganache.toml index 267af6bcb..63df5deb9 100644 --- a/docs/config/ganache.toml +++ b/docs/config/ganache.toml @@ -41,7 +41,7 @@ ethereum_adapter_relayer = 'http://localhost:8888' creators_whitelist = [] validators_whitelist = [] - +admins = ['0xce07CbB7e054514D590a0262C93070D838bFBA2e'] [[token_address_whitelist]] # Mocked TOKEN diff --git a/docs/config/prod.toml b/docs/config/prod.toml index a79398dbc..9c40ce9ef 100644 --- a/docs/config/prod.toml +++ b/docs/config/prod.toml @@ -38,7 +38,7 @@ ethereum_adapter_relayer = 'https://relayer.adex.network' creators_whitelist = [] validators_whitelist = [] - +admins = ['0x5d6A3F1AD7b124ecDFDf4841D9bB246eD5fBF04c'] [[token_address_whitelist]] # DAI diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index d008b7fa6..e14d571c9 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,7 +1,7 @@ -use crate::{ChannelId, DomainError, sentry::DateHour, CampaignId, IPFS, Address}; +use crate::ChannelId; +use chrono::{DateTime, Utc}; use parse_display::Display; use serde::{Deserialize, Serialize}; -use chrono::{Utc, DateTime, serde::ts_seconds}; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; @@ -78,34 +78,74 @@ pub struct AnalyticsQuery { #[serde(default = "default_event_type")] pub event_type: String, #[serde(default = "default_metric")] - pub metric: String, + pub metric: Metric, #[serde(default = "default_timeframe")] - pub timeframe: String, + pub timeframe: Timeframe, pub segment_by: Option, - #[serde(with = "ts_seconds", default = "Utc::now", rename = "activeTo")] - pub start: DateTime, - #[serde(with = "ts_seconds", default = "Utc::now", rename = "activeTo")] - pub end: DateTime, - #[serde(default = "default_timezone")] - pub timezone: String, - #[serde(flatten)] - pub keys: AnalyticsQueryKeys, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct AnalyticsQueryKeys { - pub campaign_id: 
Option, - pub ad_unit: Option, - pub ad_slot: Option, + pub start: Option>, + pub end: Option>, + // #[serde(default = "default_timezone")] + // pub timezone: String, + pub campaign_id: Option, + pub ad_unit: Option, + pub ad_slot: Option, pub ad_slot_type: Option, - pub advertiser: Option
<Address>, -    pub publisher: Option<Address>
, + pub advertiser: Option, + pub publisher: Option, pub hostname: Option, - pub country: Option, + pub country: Option, pub os_name: Option, } +impl AnalyticsQuery { + pub fn keys(&self) -> Vec { + let mut keys = vec![]; + if self.campaign_id.is_some() { + keys.push("campaignId".into()) + } + if self.ad_unit.is_some() { + keys.push("adUnit".into()) + } + if self.ad_slot.is_some() { + keys.push("adslot".into()) + } + if self.ad_slot_type.is_some() { + keys.push("adSlotType".into()) + } + if self.advertiser.is_some() { + keys.push("advertiser".into()) + } + if self.publisher.is_some() { + keys.push("publisher".into()) + } + if self.hostname.is_some() { + keys.push("hostname".into()) + } + if self.campaign_id.is_some() { + keys.push("country".into()) + } + if self.campaign_id.is_some() { + keys.push("osName".into()) + } + keys + } + + pub fn try_get_key(&self, key: &str) -> &Option { + match key { + "campaign_id" => &self.campaign_id, + "ad_unit" => &self.ad_unit, + "ad_slot" => &self.ad_slot, + "ad_slot_type" => &self.ad_slot_type, + "advertiser" => &self.advertiser, + "publisher" => &self.publisher, + "hostname" => &self.hostname, + "country" => &self.country, + "os_name" => &self.os_name, + _ => &None, + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Display, Hash, Eq)] #[serde(untagged, into = "String", from = "String")] pub enum OperatingSystem { @@ -115,6 +155,34 @@ pub enum OperatingSystem { Other, } +#[derive(Debug, Clone, Serialize, Deserialize, Display)] +pub enum Timeframe { + Year, + Month, + Week, + Day, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Display)] +pub enum Metric { + Count, + Paid, +} + +impl Timeframe { + pub fn get_period_in_hours(&self) -> i64 { + let hour = 1; + let day = 24 * hour; + let year = 365 * day; + match self { + Timeframe::Day => day, + Timeframe::Week => 7 * day, + Timeframe::Month => year / 12, + Timeframe::Year => year, + } + } +} + impl Default for OperatingSystem { fn default() -> Self { Self::Other @@ -195,38 +263,6 @@ impl OperatingSystem { } } -impl AnalyticsQuery { - pub fn is_valid(&self) -> Result<(), DomainError> { - let valid_event_types = ["IMPRESSION", "CLICK"]; - let valid_metric = ["eventPayouts", "eventCounts"]; - let valid_timeframe = ["year", "month", "week", "day", "hour"]; - - if !valid_event_types.contains(&self.event_type.as_str()) { - Err(DomainError::InvalidArgument(format!( - "invalid event_type, possible values are: {}", - valid_event_types.join(" ,") - ))) - } else if !valid_metric.contains(&self.metric.as_str()) { - Err(DomainError::InvalidArgument(format!( - "invalid metric, possible values are: {}", - valid_metric.join(" ,") - ))) - } else if !valid_timeframe.contains(&self.timeframe.as_str()) { - Err(DomainError::InvalidArgument(format!( - "invalid timeframe, possible values are: {}", - valid_timeframe.join(" ,") - ))) - } else if self.limit > ANALYTICS_QUERY_LIMIT { - Err(DomainError::InvalidArgument(format!( - "invalid limit {}, maximum value 200", - self.limit - ))) - } else { - Ok(()) - } - } -} - fn default_limit() -> u32 { 100 } @@ -235,17 +271,17 @@ fn default_event_type() -> String { "IMPRESSION".into() } -fn default_metric() -> String { - "eventCounts".into() +fn default_metric() -> Metric { + Metric::Count } -fn default_timeframe() -> String { - "hour".into() +fn default_timeframe() -> Timeframe { + Timeframe::Day } -fn default_timezone() -> String { - "UTC".into() -} +// fn default_timezone() -> String { +// "UTC".into() +// } #[cfg(test)] mod test { diff --git 
a/primitives/src/config.rs b/primitives/src/config.rs index 62519dc73..48ff3272c 100644 --- a/primitives/src/config.rs +++ b/primitives/src/config.rs @@ -81,6 +81,7 @@ pub struct Config { pub ethereum_adapter_relayer: String, pub creators_whitelist: Vec
, pub validators_whitelist: Vec, + pub admins: Vec, #[serde(deserialize_with = "deserialize_token_whitelist")] pub token_address_whitelist: HashMap, } diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 3ec328938..d090de172 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -5,11 +5,10 @@ use crate::{ validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}, Address, Balances, BigNum, CampaignId, Channel, ChannelId, UnifiedNum, ValidatorId, IPFS, }; -use bytes::BytesMut; -use chrono::{DateTime, Datelike, TimeZone, Timelike, Utc}; -use postgres_types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; -use serde::{Deserialize, Serialize}; -use std::{collections::HashMap, error::Error, fmt, hash::Hash}; +use chrono::{Date, DateTime, NaiveDate, TimeZone, Timelike, Utc}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::{collections::HashMap, fmt, hash::Hash}; +use thiserror::Error; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] @@ -204,6 +203,14 @@ pub struct Analytics { pub payout_count: u32, } +// TODO: Verify this is the needed output +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct FetchedAnalytics { + pub payout_amount: Option, + pub payout_count: Option, +} + #[derive(Debug, Error, PartialEq, Eq)] #[error("Minutes ({minutes}), seconds ({seconds}) & nanoseconds ({nanoseconds}) should all be set to 0 (zero)")] pub struct DateHourError { @@ -752,7 +759,7 @@ pub mod campaign_create { #[cfg(feature = "postgres")] mod postgres { - use super::{Analytics, DateHour, MessageResponse, ValidatorMessage}; + use super::{Analytics, DateHour, FetchedAnalytics, MessageResponse, ValidatorMessage}; use crate::{ sentry::EventAggregate, validator::{messages::Type as MessageType, MessageTypes}, @@ -893,6 +900,15 @@ mod postgres { accepts!(TIMESTAMPTZ); to_sql_checked!(); } + + impl From<&Row> for FetchedAnalytics { + fn from(row: &Row) -> Self { + Self { + payout_amount: row.get("payout_amount"), + payout_count: Some(row.get::<_, i32>("payout_count").unsigned_abs()), + } + } + } } #[cfg(test)] diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 1105f2629..e4e8d1bfd 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -1,230 +1,70 @@ -use crate::{epoch, Auth}; use chrono::Utc; use primitives::{ - analytics::{AnalyticsData, AnalyticsQuery, ANALYTICS_QUERY_LIMIT}, - sentry::{ - AdvancedAnalyticsResponse, Analytics, ChannelReport, PublisherReport, UpdateAnalytics, - }, - ChannelId, ValidatorId, + analytics::AnalyticsQuery, + sentry::{Analytics, DateHour, FetchedAnalytics, UpdateAnalytics}, + ValidatorId, }; -use redis::{aio::MultiplexedConnection, cmd}; -use std::collections::HashMap; -use tokio_postgres::types::ToSql; use super::{DbPool, PoolError}; -pub enum AnalyticsType { - Advertiser { auth: Auth }, - Global, - Publisher { auth: Auth }, -} - -pub async fn advertiser_channel_ids( - pool: &DbPool, - creator: &ValidatorId, -) -> Result, PoolError> { - let client = pool.get().await?; - - let stmt = client - .prepare("SELECT id FROM channels WHERE creator = $1") - .await?; - let rows = client.query(&stmt, &[creator]).await?; - - let channel_ids: Vec = rows.iter().map(ChannelId::from).collect(); - Ok(channel_ids) -} - -fn metric_to_column(metric: &str) -> String { - match metric { - "eventCounts" => "count".to_string(), - "eventPayouts" => "payout".to_string(), - _ 
=> "count".to_string(), - } -} - pub async fn get_analytics( - query: AnalyticsQuery, pool: &DbPool, - analytics_type: AnalyticsType, - segment_by_channel: bool, - channel_id: Option<&ChannelId>, -) -> Result, PoolError> { + start_date: DateHour, + end_date: Option>, + query: &AnalyticsQuery, + auth_as_key: Option, + auth_uid: ValidatorId, + limit: u32, +) -> Result, PoolError> { let client = pool.get().await?; - // converts metric to column - let metric = metric_to_column(&query.metric); + let mut where_clauses = vec![format!("time >= {}", start_date.to_datetime())]; - let mut params = Vec::<&(dyn ToSql + Sync)>::new(); - let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); - let (interval, period) = get_time_frame(&query.timeframe); - let time_limit = Utc::now().timestamp() - period; - - let mut where_clauses = vec![format!("created > to_timestamp({})", time_limit)]; + for key in query.keys() { + let key_value = query.try_get_key(&key).as_ref().unwrap(); + where_clauses.push(format!("{} = {}", key, key_value)); + } - params.push(&query.event_type); + if let Some(auth_as_key) = auth_as_key { + where_clauses.push(format!("{} = {}", auth_as_key, auth_uid)) + } + if let Some(end_date) = end_date { + where_clauses.push(format!("time <= {}", end_date.to_datetime())); + } where_clauses.extend(vec![ - format!("event_type = ${}", params.len()), - format!("{} IS NOT NULL", metric), + format!("event_type = ${}", query.event_type), + format!("{} IS NOT NULL", query.metric.to_string()), ]); - if let Some(id) = channel_id { - where_clauses.push(format!("channel_id = '{}'", id)); - } - - let mut group_clause = "time".to_string(); - let mut select_clause = match analytics_type { - AnalyticsType::Advertiser { auth } => { - if channel_id.is_none() { - where_clauses.push(format!( - "channel_id IN (SELECT id FROM channels WHERE creator = '{}')", - auth.uid - )); - } - - format!( - "SUM({}::numeric)::varchar as value, (extract(epoch from created) - (MOD( CAST (extract(epoch from created) AS NUMERIC), {}))) as time", - metric, interval - ) - } - AnalyticsType::Global => { - where_clauses.push("earner IS NULL".to_string()); - - format!( - "SUM({}::numeric)::varchar as value, (extract(epoch from created) - (MOD( CAST (extract(epoch from created) AS NUMERIC), {}))) as time", - metric, interval - ) - } - AnalyticsType::Publisher { auth } => { - where_clauses.push(format!("earner = '{}'", auth.uid)); - - format!( - "SUM({}::numeric)::varchar as value, (extract(epoch from created) - (MOD( CAST (extract(epoch from created) AS NUMERIC), {}))) as time", - metric, interval - ) - } + let (select_clause, group_clause) = match &query.segment_by { + Some(segment_by) => ( + format!("{}, time, {}", &query.metric.to_string(), segment_by), + format!("time, {}", segment_by), + ), + None => ( + format!("{}, time", query.metric.to_string()), + "time".to_string(), + ), }; - if segment_by_channel { - select_clause = format!("{}, channel_id", select_clause); - group_clause = format!("{}, channel_id", group_clause); - } - let sql_query = format!( - "SELECT {} FROM event_aggregates WHERE {} GROUP BY {} LIMIT {}", + "SELECT {} FROM analytics WHERE {} GROUP BY {} LIMIT {}", select_clause, where_clauses.join(" AND "), group_clause, - applied_limit, + limit, ); // execute query let stmt = client.prepare(&sql_query).await?; - let rows = client.query(&stmt, ¶ms).await?; + let rows = client.query(&stmt, &[]).await?; - let analytics: Vec = rows.iter().map(AnalyticsData::from).collect(); + let analytics: Vec = 
rows.iter().map(FetchedAnalytics::from).collect(); Ok(analytics) } -fn get_time_frame(timeframe: &str) -> (i64, i64) { - let minute = 60 * 1000; - let hour = 60 * minute; - let day = 24 * hour; - - match timeframe { - "year" => (30 * day, 365 * day), - "month" => (day, 30 * day), - "week" => (6 * hour, 7 * day), - "day" => (hour, day), - "hour" => (minute, hour), - _ => (hour, day), - } -} - -async fn stat_pair( - mut conn: MultiplexedConnection, - key: &str, -) -> Result, Box> { - let data = cmd("ZRANGE") - .arg(key) - .arg(0_u64) - .arg(-1_i64) - .arg("WITHSCORES") - .query_async::<_, Vec>(&mut conn) - .await?; - - Ok(data - .chunks(2) - .map(|chunk: &[String]| { - ( - chunk[0].clone(), - chunk[1].parse::().expect("should parse value"), - ) - }) - .collect()) -} - -pub async fn get_advanced_reports( - redis: &MultiplexedConnection, - event_type: &str, - publisher: &ValidatorId, - channel_ids: &[ChannelId], -) -> Result> { - let publisher_reports = [ - PublisherReport::AdUnit, - PublisherReport::AdSlot, - PublisherReport::AdSlotPay, - PublisherReport::Country, - PublisherReport::Hostname, - ]; - - let mut publisher_stats: HashMap> = HashMap::new(); - - for publisher_report in publisher_reports.iter() { - let pair = match publisher_report { - PublisherReport::Country => format!( - "{}:{}:{}:{}", - epoch().floor(), - publisher_report, - event_type, - publisher - ), - _ => format!("{}:{}:{}", publisher_report, event_type, publisher), - }; - let result = stat_pair(redis.clone(), &pair).await?; - publisher_stats.insert(publisher_report.clone(), result); - } - - let mut by_channel_stats = HashMap::new(); - - let channel_reports = [ - ChannelReport::AdUnit, - ChannelReport::Hostname, - ChannelReport::HostnamePay, - ]; - - for channel_id in channel_ids { - let mut channel_stat = HashMap::new(); - - for channel_report in channel_reports.iter() { - let result = stat_pair( - redis.clone(), - &format!("{}:{}:{}", channel_report, event_type, channel_id), - ) - .await?; - channel_stat.insert(channel_report.clone(), result); - } - - by_channel_stats.insert(channel_id.to_owned(), channel_stat); - } - - Ok(AdvancedAnalyticsResponse { - by_channel_stats, - publisher_stats, - }) -} - /// This will update a record when it's present by incrementing its payout_amount and payout_count fields pub async fn update_analytics( pool: &DbPool, diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index 5655b3050..8ee174b46 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -5,7 +5,7 @@ use chrono::Utc; use hyper::{Body, Method, Request, Response, StatusCode}; use middleware::{ - auth::{AuthRequired, Authenticate}, + auth::{AuthRequired, Authenticate, IsAdmin}, campaign::{CalledByCreator, CampaignLoad}, channel::ChannelLoad, cors::{cors, Cors}, @@ -63,17 +63,6 @@ static CHANNEL_EVENTS_AGGREGATES: Lazy = Lazy::new(|| { Regex::new(r"^/v5/channel/0x([a-zA-Z0-9]{64})/events-aggregates/?$") .expect("The regex should be valid") }); -static ANALYTICS_BY_CHANNEL_ID: Lazy = Lazy::new(|| { - Regex::new(r"^/analytics/0x([a-zA-Z0-9]{64})/?$").expect("The regex should be valid") -}); -static ADVERTISER_ANALYTICS_BY_CHANNEL_ID: Lazy = Lazy::new(|| { - Regex::new(r"^/analytics/for-advertiser/0x([a-zA-Z0-9]{64})/?$") - .expect("The regex should be valid") -}); -static PUBLISHER_ANALYTICS_BY_CHANNEL_ID: Lazy = Lazy::new(|| { - Regex::new(r"^/analytics/for-publisher/0x([a-zA-Z0-9]{64})/?$") - .expect("The regex should be valid") -}); static CHANNEL_SPENDER_LEAF_AND_TOTAL_DEPOSITED: Lazy = Lazy::new(|| { 
Regex::new(r"^/v5/channel/0x([a-zA-Z0-9]{64})/spender/0x([a-zA-Z0-9]{40})/?$") .expect("This regex should be valid") @@ -229,27 +218,35 @@ async fn analytics_router( mut req: Request, app: &Application, ) -> Result, ResponseError> { - use routes::analytics::{ - admin_analytics, advertiser_analytics, analytics, publisher_analytics, - }; + use routes::analytics::analytics; let (route, method) = (req.uri().path(), req.method()); match (route, method) { - ("/analytics", &Method::GET) => analytics(req, app, Some(vec!["country".to_string(), "ad_slot_type".to_string()]), None).await, + ("/analytics", &Method::GET) => { + analytics( + req, + app, + Some(vec!["country".to_string(), "ad_slot_type".to_string()]), + None, + ) + .await + } ("/analytics/for-advertiser", &Method::GET) => { let req = AuthRequired.call(req, app).await?; - advertiser_analytics(req, app).await + analytics(req, app, None, Some("advertiser".into())).await } ("/analytics/for-publisher", &Method::GET) => { let req = AuthRequired.call(req, app).await?; - - publisher_analytics(req, app).await + analytics(req, app, None, Some("publisher".into())).await } ("/analytics/for-admin", &Method::GET) => { - let req = AuthRequired.call(req, app).await?; - - admin_analytics(req, app).await + req = Chain::new() + .chain(AuthRequired) + .chain(IsAdmin) + .apply(req, app) + .await?; + analytics(req, app, None, None).await } _ => Err(ResponseError::NotFound), } diff --git a/sentry/src/middleware/auth.rs b/sentry/src/middleware/auth.rs index 5282d7370..1ebbc15d2 100644 --- a/sentry/src/middleware/auth.rs +++ b/sentry/src/middleware/auth.rs @@ -47,6 +47,30 @@ impl Middleware for AuthRequired { } } +#[derive(Debug)] +pub struct IsAdmin; + +#[async_trait] +impl Middleware for IsAdmin { + async fn call<'a>( + &self, + request: Request, + application: &'a Application, + ) -> Result, ResponseError> { + let auth = request + .extensions() + .get::() + .expect("request should have session") + .to_owned(); + + if !application.config.admins.contains(&auth.uid.to_string()) { + return Err(ResponseError::Unauthorized); + } + + Ok(request) + } +} + /// Check `Authorization` header for `Bearer` scheme with `Adapter::session_from_token`. /// If the `Adapter` fails to create an `AdapterSession`, `ResponseError::BadRequest` will be returned. 
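// Illustration, not part of this diff: a request that reaches `for_request` is expected to
// carry a header of the form `Authorization: Bearer <token>`; the token is whatever the
// configured `Adapter` accepts in `session_from_token`, and a failure there surfaces as the
// `ResponseError::BadRequest` described above.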
async fn for_request( diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index fc6810a2e..c0bdfd54d 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,56 +1,12 @@ -use crate::{ - db::analytics::{advertiser_channel_ids, get_advanced_reports, get_analytics, AnalyticsType}, - success_response, Application, Auth, ResponseError, RouteParams, -}; +use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; +use chrono::{Duration, Utc}; use hyper::{Body, Request, Response}; use primitives::{ adapter::Adapter, - analytics::{AnalyticsQuery, AnalyticsResponse}, - sentry::DateHour, - ChannelId, + analytics::{AnalyticsQuery, Metric, ANALYTICS_QUERY_LIMIT}, + sentry::{DateHour, FetchedAnalytics}, + UnifiedNum, }; -use redis::aio::MultiplexedConnection; -use slog::{error, Logger}; - -pub const ALLOWED_KEYS: [&'static str; 9] = [ - "campaignId", - "adUnit", - "adSlot", - "adSlotType", - "advertiser", - "publisher", - "hostname", - "country", - "osName" -]; - -// TODO: Convert timeframe to enum and add this as an enum method -pub fn get_period_in_hours(timeframe: String) -> u64 { - let hour = 1; - let day = 24 * hour; - let year = 365 * day; - if timeframe == "day" { - day - } else if timeframe == "week" { - 7 * day - } else if timeframe == "month" { - year / 12 - } else if timeframe == "year" { - year - } else { - day - } -} - -pub fn get_time_period_query_clause(start: Option, end: Option>, period: u64, event_type: String, metric: String, timezone: String) -> String { - // start && !Number.isNaN(new Date(start)) ? new Date(start) : new Date(Date.now() - period), - let start = match start { - Some(start) => { - DateHour::from() - }, - None => DateHour::now() - - } -} pub async fn analytics( req: Request, @@ -60,99 +16,91 @@ pub async fn analytics( ) -> Result, ResponseError> { let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - let period = get_period_in_hours(query.timeframe); - - - todo!(); -} - - -// TODO: remove each of these -pub async fn publisher_analytics( - req: Request, - app: &Application, -) -> Result, ResponseError> { - todo!(); - // let auth = req - // .extensions() - // .get::() - // .ok_or(ResponseError::Unauthorized)? 
- // .clone(); - - // let analytics_type = AnalyticsType::Publisher { auth }; - - // process_analytics(req, app, analytics_type) - // .await - // .map(success_response) -} - -pub async fn advertiser_analytics( - req: Request, - app: &Application, -) -> Result, ResponseError> { - todo!(); - // let sess = req.extensions().get::(); - // let analytics_type = AnalyticsType::Advertiser { - // auth: sess.ok_or(ResponseError::Unauthorized)?.to_owned(), - // }; - - // process_analytics(req, app, analytics_type) - // .await - // .map(success_response) -} - -pub async fn process_analytics( - req: Request, - app: &Application, - analytics_type: AnalyticsType, -) -> Result { - todo!(); - // let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - // query - // .is_valid() - // .map_err(|e| ResponseError::BadRequest(e.to_string()))?; - - // let channel_id = req.extensions().get::(); - - // let segment_channel = query.segment_by_channel.is_some(); - - // let limit = query.limit; - - // let aggr = get_analytics( - // query, - // &app.pool, - // analytics_type, - // segment_channel, - // channel_id, - // ) - // .await?; - - // let response = AnalyticsResponse { aggr, limit }; - - // serde_json::to_string(&response) - // .map_err(|_| ResponseError::BadRequest("error occurred; try again later".to_string())) -} - -pub async fn admin_analytics( - req: Request, - app: &Application, -) -> Result, ResponseError> { - todo!(); - // let auth = req.extensions().get::().expect("auth is required"); - // let advertiser_channels = advertiser_channel_ids(&app.pool, &auth.uid).await?; + let period_in_hours = query.timeframe.get_period_in_hours(); + let start_date = match query.start { + Some(start_date) => DateHour::try_from(start_date)?, + None => DateHour::try_from(Utc::now() - Duration::hours(period_in_hours))?, + }; + + let end_date = match query.end { + Some(end_date) => Some(DateHour::try_from(end_date)?), + None => None, + }; + + let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); + + let allowed_keys = match allowed_keys { + Some(keys) => keys, + None => vec![ + "campaignId".to_string(), + "adUnit".to_string(), + "adSlot".to_string(), + "adSlotType".to_string(), + "advertiser".to_string(), + "publisher".to_string(), + "hostname".to_string(), + "country".to_string(), + "osName".to_string(), + ], + }; + + if let Some(segment_by) = &query.segment_by { + if !allowed_keys.contains(segment_by) { + return Err(ResponseError::BadRequest( + "Disallowed segmentBy".to_string(), + )); + } + } - // let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; + let keys_in_query = query.keys(); + for key in keys_in_query { + if !allowed_keys.contains(&key) { + return Err(ResponseError::BadRequest(format!( + "disallowed key in query: {}", + key + ))); + } + } - // let response = get_advanced_reports( - // &app.redis, - // &query.event_type, - // &auth.uid, - // &advertiser_channels, - // ) - // .await - // .map_err(|_| ResponseError::BadRequest("error occurred; try again later".to_string()))?; + let auth = req + .extensions() + .get::() + .expect("request should have session") + .to_owned(); + + let analytics = get_analytics( + &app.pool, + start_date, + end_date, + &query, + auth_as_key, + auth.uid, + applied_limit, + ) + .await?; + + let mut count = 0; + let paid = UnifiedNum::from_u64(0); + + // TODO: We can do this part in the SLQ querry if needed + analytics.iter().for_each(|entry| match &query.metric { + Metric::Count => count += entry.payout_count.unwrap(), + Metric::Paid => { + 
paid.checked_add(&entry.payout_amount.unwrap()); + } + }); + let output: FetchedAnalytics = match query.metric { + Metric::Count => FetchedAnalytics { + payout_count: Some(count), + payout_amount: None, + }, + Metric::Paid => FetchedAnalytics { + payout_count: None, + payout_amount: Some(paid), + }, + }; - // Ok(success_response(serde_json::to_string(&response)?)) + Ok(success_response(serde_json::to_string(&output)?)) } // async fn cache( From c1e90a095023cd5bc24b87090bfdabd1819c1777 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Mon, 29 Nov 2021 21:03:32 +0200 Subject: [PATCH 03/24] code clean up --- primitives/src/analytics.rs | 11 +++--- sentry/src/db/analytics.rs | 5 ++- sentry/src/routes/analytics.rs | 69 +++++++++++++++++++++------------- 3 files changed, 51 insertions(+), 34 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index e14d571c9..aa26f7a51 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -69,7 +69,6 @@ pub mod postgres { } } -// TODO: Clean up query defaults/start/end #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct AnalyticsQuery { @@ -132,15 +131,15 @@ impl AnalyticsQuery { pub fn try_get_key(&self, key: &str) -> &Option { match key { - "campaign_id" => &self.campaign_id, - "ad_unit" => &self.ad_unit, - "ad_slot" => &self.ad_slot, - "ad_slot_type" => &self.ad_slot_type, + "campaignId" => &self.campaign_id, + "adUnit" => &self.ad_unit, + "adSlot" => &self.ad_slot, + "adSlotType" => &self.ad_slot_type, "advertiser" => &self.advertiser, "publisher" => &self.publisher, "hostname" => &self.hostname, "country" => &self.country, - "os_name" => &self.os_name, + "osName" => &self.os_name, _ => &None, } } diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index e4e8d1bfd..cd5efca83 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -21,7 +21,10 @@ pub async fn get_analytics( let mut where_clauses = vec![format!("time >= {}", start_date.to_datetime())]; for key in query.keys() { - let key_value = query.try_get_key(&key).as_ref().unwrap(); + let key_value = query + .try_get_key(&key) + .as_ref() + .expect("Should exist, values have already been validated"); where_clauses.push(format!("{} = {}", key, key_value)); } diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index c0bdfd54d..d264b151b 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,6 +1,7 @@ use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; use chrono::{Duration, Utc}; use hyper::{Body, Request, Response}; +use once_cell::sync::Lazy; use primitives::{ adapter::Adapter, analytics::{AnalyticsQuery, Metric, ANALYTICS_QUERY_LIMIT}, @@ -8,6 +9,20 @@ use primitives::{ UnifiedNum, }; +pub static ALLOWED_KEYS: Lazy<[String; 9]> = Lazy::new(|| { + [ + "campaignId".to_string(), + "adUnit".to_string(), + "adSlot".to_string(), + "adSlotType".to_string(), + "advertiser".to_string(), + "publisher".to_string(), + "hostname".to_string(), + "country".to_string(), + "osName".to_string(), + ] +}); + pub async fn analytics( req: Request, app: &Application, @@ -31,17 +46,7 @@ pub async fn analytics( let allowed_keys = match allowed_keys { Some(keys) => keys, - None => vec![ - "campaignId".to_string(), - "adUnit".to_string(), - "adSlot".to_string(), - "adSlotType".to_string(), - "advertiser".to_string(), - "publisher".to_string(), - "hostname".to_string(), - "country".to_string(), - "osName".to_string(), 
- ], + None => ALLOWED_KEYS.to_vec(), }; if let Some(segment_by) = &query.segment_by { @@ -52,8 +57,7 @@ pub async fn analytics( } } - let keys_in_query = query.keys(); - for key in keys_in_query { + for key in query.keys() { if !allowed_keys.contains(&key) { return Err(ResponseError::BadRequest(format!( "disallowed key in query: {}", @@ -82,22 +86,33 @@ pub async fn analytics( let mut count = 0; let paid = UnifiedNum::from_u64(0); - // TODO: We can do this part in the SLQ querry if needed - analytics.iter().for_each(|entry| match &query.metric { - Metric::Count => count += entry.payout_count.unwrap(), + // TODO: Discuss this part and potentially implement it as logic in the SQL Query + let output: FetchedAnalytics = match &query.metric { + Metric::Count => { + analytics.iter().for_each(|entry| { + count += entry + .payout_count + .expect("payout_count should be selected and not null") + }); + FetchedAnalytics { + payout_count: Some(count), + payout_amount: None, + } + } Metric::Paid => { - paid.checked_add(&entry.payout_amount.unwrap()); + analytics.iter().for_each(|entry| { + paid.checked_add( + &entry + .payout_amount + .expect("payout_amount should be selected and not null"), + ) + .expect("TODO"); + }); + FetchedAnalytics { + payout_count: None, + payout_amount: Some(paid), + } } - }); - let output: FetchedAnalytics = match query.metric { - Metric::Count => FetchedAnalytics { - payout_count: Some(count), - payout_amount: None, - }, - Metric::Paid => FetchedAnalytics { - payout_count: None, - payout_amount: Some(paid), - }, }; Ok(success_response(serde_json::to_string(&output)?)) From e10ceaad7a63122cf435e01e6afc0ed35e1c08d9 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Fri, 3 Dec 2021 22:25:09 +0200 Subject: [PATCH 04/24] (still WIP) requested changes + test coverage --- docs/config/ganache.toml | 2 +- primitives/src/analytics.rs | 147 +++++++++--------- sentry/src/db/analytics.rs | 91 ++++++----- sentry/src/routes/analytics.rs | 267 +++++++++++++++++++++++++++++++-- 4 files changed, 384 insertions(+), 123 deletions(-) diff --git a/docs/config/ganache.toml b/docs/config/ganache.toml index 63df5deb9..f6c733072 100644 --- a/docs/config/ganache.toml +++ b/docs/config/ganache.toml @@ -41,7 +41,7 @@ ethereum_adapter_relayer = 'http://localhost:8888' creators_whitelist = [] validators_whitelist = [] -admins = ['0xce07CbB7e054514D590a0262C93070D838bFBA2e'] +admins = ['0x80690751969B234697e9059e04ed72195c3507fa'] [[token_address_whitelist]] # Mocked TOKEN diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index aa26f7a51..c1a760117 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,45 +1,20 @@ -use crate::ChannelId; -use chrono::{DateTime, Utc}; +use crate::{sentry::DateHour, ValidatorId, CampaignId, IPFS, Address}; +use chrono::Utc; use parse_display::Display; use serde::{Deserialize, Serialize}; +use std::collections::HashMap; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct AnalyticsData { - pub time: f64, - pub value: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub channel_id: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct AnalyticsResponse { - pub aggr: Vec, - pub limit: u32, -} - #[cfg(feature = "postgres")] pub mod postgres { - use super::{AnalyticsData, OperatingSystem}; + use super::{OperatingSystem, Metric}; use bytes::BytesMut; use std::error::Error; use tokio_postgres::{ types::{accepts, 
to_sql_checked, FromSql, IsNull, ToSql, Type}, - Row, }; - impl From<&Row> for AnalyticsData { - fn from(row: &Row) -> Self { - Self { - time: row.get("time"), - value: row.get("value"), - channel_id: row.try_get("channel_id").ok(), - } - } - } - impl<'a> FromSql<'a> for OperatingSystem { fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { let str_slice = <&str as FromSql>::from_sql(ty, raw)?; @@ -67,9 +42,18 @@ pub mod postgres { accepts!(TEXT, VARCHAR); to_sql_checked!(); } + + impl ToSql for Metric { + fn to_sql(&self, ty: &Type, w: &mut BytesMut) -> Result> { + self.column_name().to_sql(ty, w) + } + + accepts!(TEXT, VARCHAR); + to_sql_checked!(); + } } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct AnalyticsQuery { #[serde(default = "default_limit")] @@ -81,68 +65,53 @@ pub struct AnalyticsQuery { #[serde(default = "default_timeframe")] pub timeframe: Timeframe, pub segment_by: Option, - pub start: Option>, - pub end: Option>, + pub start: Option>, + pub end: Option>, // #[serde(default = "default_timezone")] // pub timezone: String, - pub campaign_id: Option, - pub ad_unit: Option, - pub ad_slot: Option, + pub campaign_id: Option, + pub ad_unit: Option, + pub ad_slot: Option, pub ad_slot_type: Option, - pub advertiser: Option, - pub publisher: Option, + pub advertiser: Option
<Address>, +    pub publisher: Option<Address>
, pub hostname: Option, pub country: Option, - pub os_name: Option, + pub os_name: Option, } impl AnalyticsQuery { - pub fn keys(&self) -> Vec { - let mut keys = vec![]; - if self.campaign_id.is_some() { - keys.push("campaignId".into()) + pub fn available_keys(&self) -> HashMap{ + let mut keys: HashMap = HashMap::new(); + if let Some(campaign_id) = self.campaign_id { + keys.insert("campaign_id".into(), campaign_id.to_string()); } - if self.ad_unit.is_some() { - keys.push("adUnit".into()) + if let Some(ad_unit) = self.ad_unit { + keys.insert("ad_unit".into(), ad_unit.to_string()); } - if self.ad_slot.is_some() { - keys.push("adslot".into()) + if let Some(ad_slot) = self.ad_slot { + keys.insert("ad_slot".into(), ad_slot.to_string()); } - if self.ad_slot_type.is_some() { - keys.push("adSlotType".into()) + if let Some(ad_slot_type) = &self.ad_slot_type { + keys.insert("ad_slot_type".into(), ad_slot_type.to_string()); } - if self.advertiser.is_some() { - keys.push("advertiser".into()) + if let Some(advertiser) = self.advertiser { + keys.insert("advertiser".into(), advertiser.to_string()); } - if self.publisher.is_some() { - keys.push("publisher".into()) + if let Some(publisher) = self.publisher { + keys.insert("publisher".into(), publisher.to_string()); } - if self.hostname.is_some() { - keys.push("hostname".into()) + if let Some(hostname) = &self.hostname { + keys.insert("hostname".into(), hostname.to_string()); } - if self.campaign_id.is_some() { - keys.push("country".into()) + if let Some(country) = &self.country { + keys.insert("country".into(), country.to_string()); } - if self.campaign_id.is_some() { - keys.push("osName".into()) + if let Some(os_name) = &self.os_name { + keys.insert("os_name".into(), os_name.to_string()); } keys } - - pub fn try_get_key(&self, key: &str) -> &Option { - match key { - "campaignId" => &self.campaign_id, - "adUnit" => &self.ad_unit, - "adSlot" => &self.ad_slot, - "adSlotType" => &self.ad_slot_type, - "advertiser" => &self.advertiser, - "publisher" => &self.publisher, - "hostname" => &self.hostname, - "country" => &self.country, - "osName" => &self.os_name, - _ => &None, - } - } } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Display, Hash, Eq)] @@ -155,6 +124,7 @@ pub enum OperatingSystem { } #[derive(Debug, Clone, Serialize, Deserialize, Display)] +#[serde(rename_all = "lowercase")] pub enum Timeframe { Year, Month, @@ -163,13 +133,42 @@ pub enum Timeframe { } #[derive(Debug, Clone, Serialize, Deserialize, Display)] +#[serde(rename_all = "lowercase")] pub enum Metric { Count, Paid, } +#[derive(Debug, Clone, Serialize, Deserialize, Display)] +pub enum AuthenticateAs { + #[display("{0}")] + Advertiser(ValidatorId), + #[display("{0}")] + Publisher(ValidatorId), +} + +impl AuthenticateAs { + pub fn try_from(key: &str, uid: ValidatorId) -> Option { + match key { + "advertiser" => Some(Self::Advertiser(uid)), + "publisher" => Some(Self::Publisher(uid)), + // TODO: Should we throw an error here + _ => None, + } + } +} + +impl Metric { + pub fn column_name(&self) -> String { + match self { + Metric::Count => "payout_count".to_string(), + Metric::Paid => "payout_amount".to_string(), + } + } +} + impl Timeframe { - pub fn get_period_in_hours(&self) -> i64 { + pub fn to_hours(&self) -> i64 { let hour = 1; let day = 24 * hour; let year = 365 * day; diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index cd5efca83..76f72fa02 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -1,9 +1,9 @@ use chrono::Utc; use 
primitives::{ - analytics::AnalyticsQuery, + analytics::{AnalyticsQuery, AuthenticateAs}, sentry::{Analytics, DateHour, FetchedAnalytics, UpdateAnalytics}, - ValidatorId, }; +use tokio_postgres::types::ToSql; use super::{DbPool, PoolError}; @@ -12,62 +12,79 @@ pub async fn get_analytics( start_date: DateHour, end_date: Option>, query: &AnalyticsQuery, - auth_as_key: Option, - auth_uid: ValidatorId, + auth_as: Option, limit: u32, ) -> Result, PoolError> { let client = pool.get().await?; - let mut where_clauses = vec![format!("time >= {}", start_date.to_datetime())]; + let (where_clauses, mut params) = analytics_query_params(&start_date, &end_date, query, &auth_as); - for key in query.keys() { - let key_value = query - .try_get_key(&key) - .as_ref() - .expect("Should exist, values have already been validated"); - where_clauses.push(format!("{} = {}", key, key_value)); - } - - if let Some(auth_as_key) = auth_as_key { - where_clauses.push(format!("{} = {}", auth_as_key, auth_uid)) - } + let mut select_clause = vec!["time".to_string(), format!("${}", params.len() + 1)]; + params.push(&query.metric); + let mut group_clause = vec!["time".to_string()]; - if let Some(end_date) = end_date { - where_clauses.push(format!("time <= {}", end_date.to_datetime())); + if let Some(segment_by) = &query.segment_by { + select_clause.push(segment_by.to_string()); + group_clause.push(segment_by.to_string()); } - where_clauses.extend(vec![ - format!("event_type = ${}", query.event_type), - format!("{} IS NOT NULL", query.metric.to_string()), - ]); - - let (select_clause, group_clause) = match &query.segment_by { - Some(segment_by) => ( - format!("{}, time, {}", &query.metric.to_string(), segment_by), - format!("time, {}", segment_by), - ), - None => ( - format!("{}, time", query.metric.to_string()), - "time".to_string(), - ), - }; let sql_query = format!( - "SELECT {} FROM analytics WHERE {} GROUP BY {} LIMIT {}", - select_clause, + "SELECT {} FROM analytics WHERE {} GROUP BY {} ORDER BY time DESC LIMIT {}", + select_clause.join(","), where_clauses.join(" AND "), - group_clause, + group_clause.join(","), limit, ); + println!("{}", sql_query); // execute query let stmt = client.prepare(&sql_query).await?; - let rows = client.query(&stmt, &[]).await?; + let rows = client.query(&stmt, params.as_slice()).await?; let analytics: Vec = rows.iter().map(FetchedAnalytics::from).collect(); Ok(analytics) } +fn analytics_query_params<'a>( + start_date: &'a DateHour, + end_date: &'a Option>, + query: &'a AnalyticsQuery, + auth_as: &'a Option, +) -> (Vec, Vec<&'a (dyn ToSql + Sync)>) { + let mut where_clauses: Vec = vec!["time >= $1".to_string()]; + let mut params: Vec<&(dyn ToSql + Sync)> = vec![start_date]; + + for (key, value) in query.available_keys() { + where_clauses.push(format!("{} = ${}", key, params.len() + 1)); + params.push(&value); + } + + if let Some(auth_as) = auth_as { + match auth_as { + AuthenticateAs::Publisher(uid) => { + where_clauses.push(format!("publisher = ${}", params.len() + 1)); + params.push(uid); + }, + AuthenticateAs::Advertiser(uid) => { + where_clauses.push(format!("advertiser = ${}", params.len() + 1)); + params.push(uid); + } + } + } + + if let Some(end_date) = end_date { + where_clauses.push(format!("time <= ${}", params.len() + 1)); + params.push(end_date); + } + where_clauses.push(format!("event_type = ${}", params.len() + 1)); + params.push(&query.event_type); + + where_clauses.push(format!("{} IS NOT NULL", query.metric.column_name())); + + (where_clauses, params) +} + /// This will update 
a record when it's present by incrementing its payout_amount and payout_count fields pub async fn update_analytics( pool: &DbPool, diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index d264b151b..9f794a77a 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,10 +1,10 @@ use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; -use chrono::{Duration, Utc}; +use chrono::{Duration, Utc, Timelike}; use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use primitives::{ adapter::Adapter, - analytics::{AnalyticsQuery, Metric, ANALYTICS_QUERY_LIMIT}, + analytics::{AnalyticsQuery, Metric, AuthenticateAs, ANALYTICS_QUERY_LIMIT}, sentry::{DateHour, FetchedAnalytics}, UnifiedNum, }; @@ -30,11 +30,10 @@ pub async fn analytics( auth_as_key: Option, ) -> Result, ResponseError> { let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - - let period_in_hours = query.timeframe.get_period_in_hours(); + let period_in_hours = query.timeframe.to_hours(); let start_date = match query.start { Some(start_date) => DateHour::try_from(start_date)?, - None => DateHour::try_from(Utc::now() - Duration::hours(period_in_hours))?, + None => DateHour::try_from(Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(period_in_hours))?, }; let end_date = match query.end { @@ -57,7 +56,7 @@ pub async fn analytics( } } - for key in query.keys() { + for (key, _) in query.available_keys() { if !allowed_keys.contains(&key) { return Err(ResponseError::BadRequest(format!( "disallowed key in query: {}", @@ -68,17 +67,20 @@ pub async fn analytics( let auth = req .extensions() - .get::() - .expect("request should have session") - .to_owned(); + .get::(); + + let auth_as = match (auth_as_key, auth) { + (Some(auth_as_key), Some(auth)) => AuthenticateAs::try_from(&auth_as_key, auth.uid), + (Some(_), None) => return Err(ResponseError::BadRequest("auth_as_key is provided but there is no Auth object".to_string())), + _ => None + }; let analytics = get_analytics( &app.pool, start_date, end_date, &query, - auth_as_key, - auth.uid, + auth_as, applied_limit, ) .await?; @@ -135,3 +137,246 @@ pub async fn analytics( // error!(&logger, "Server error: {}", err; "module" => "analytics-cache"); // } // } + +#[cfg(test)] +mod test { + use super::*; + use primitives::{ + sentry::UpdateAnalytics, + analytics::OperatingSystem, + util::tests::prep_db::{ + DUMMY_CAMPAIGN, ADDRESSES + }, + }; + use crate:: + { + test_util::setup_dummy_app, + routes::analytics::analytics, + db::{ + analytics::update_analytics, + tests_postgres::{setup_test_migrations, DATABASE_POOL}, + DbPool + } + }; + + async fn insert_mock_analytics(pool: &DbPool) { + // analytics for NOW + let now_date = Utc::today().and_hms(1, 0, 0); + let analytics_now = UpdateAnalytics { + time: DateHour::try_from(now_date).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now).await.expect("Should update analytics"); + + let analytics_now_different_country = UpdateAnalytics { + time: DateHour::try_from(now_date).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: 
None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Japan".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now_different_country).await.expect("Should update analytics"); + + let analytics_two_hours_ago = UpdateAnalytics { + time: DateHour::try_from(now_date - Duration::hours(2)).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_two_hours_ago).await.expect("Should update analytics"); + + let analytics_four_hours_ago = UpdateAnalytics { + time: DateHour::try_from(now_date - Duration::hours(4)).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_four_hours_ago).await.expect("Should update analytics"); + + let analytics_three_days_ago = UpdateAnalytics { + time: DateHour::try_from(now_date - Duration::days(3)).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_three_days_ago).await.expect("Should update analytics"); + // analytics from 10 days ago + let analytics_ten_days_ago = UpdateAnalytics { + time: DateHour::try_from(now_date - Duration::days(10)).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_ten_days_ago).await.expect("Should update analytics"); + + let analytics_sixty_days_ago = UpdateAnalytics { + time: DateHour::try_from(now_date - Duration::days(60)).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_sixty_days_ago).await.expect("Should update analytics"); + + let analytics_two_years_ago = UpdateAnalytics { + time: 
DateHour::try_from(now_date - Duration::weeks(104)).expect("should parse"), + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_two_years_ago).await.expect("Should update analytics"); + } + + #[tokio::test] + async fn test_analytics_route_for_guest() { + let database = DATABASE_POOL.get().await.expect("Should get a DB pool"); + let app = setup_dummy_app().await; + + setup_test_migrations(database.pool.clone()) + .await + .expect("Migrations should succeed"); + + insert_mock_analytics(&database.pool).await; + + // Test with no optional values + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: FetchedAnalytics = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert!(fetched_analytics.payout_count.is_some()); + assert_eq!(fetched_analytics.payout_count.unwrap(), 4); + // Test with start date + + let start_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(1); + let req = Request::builder() + .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}", start_date)) + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: FetchedAnalytics = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.payout_count.unwrap(), 2); + + // Test with end date + let end_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(1); + let req = Request::builder() + .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&end={}", end_date)) + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: FetchedAnalytics = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.payout_count.unwrap(), 3); + + // Test with start_date and end_date + let start_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(72); + let end_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(1); + let req = Request::builder() + .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}&end={}", start_date, end_date)) + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = 
analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: FetchedAnalytics = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.payout_count.unwrap(), 2); + // Test with segment_by + // test with not allowed segment by + // test with not allowed key + // test with different metric + } +} \ No newline at end of file From 00397f6da5454089e8802404d559228a9433c619 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Mon, 6 Dec 2021 15:20:14 +0200 Subject: [PATCH 05/24] modified output + output is now split by timeframe --- primitives/src/sentry.rs | 15 +++------ sentry/src/db/analytics.rs | 16 ++++++++-- sentry/src/routes/analytics.rs | 57 +++++++++++++++++++++------------- 3 files changed, 55 insertions(+), 33 deletions(-) diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index d090de172..f7e5347e9 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -207,8 +207,12 @@ pub struct Analytics { #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] #[serde(rename_all = "camelCase")] pub struct FetchedAnalytics { + // time is represented as a timestamp + pub time: i64, pub payout_amount: Option, pub payout_count: Option, + // We can't know the exact segment type but it can always be represented as a string + pub segment: Option, } #[derive(Debug, Error, PartialEq, Eq)] @@ -759,7 +763,7 @@ pub mod campaign_create { #[cfg(feature = "postgres")] mod postgres { - use super::{Analytics, DateHour, FetchedAnalytics, MessageResponse, ValidatorMessage}; + use super::{Analytics, DateHour, MessageResponse, ValidatorMessage}; use crate::{ sentry::EventAggregate, validator::{messages::Type as MessageType, MessageTypes}, @@ -900,15 +904,6 @@ mod postgres { accepts!(TIMESTAMPTZ); to_sql_checked!(); } - - impl From<&Row> for FetchedAnalytics { - fn from(row: &Row) -> Self { - Self { - payout_amount: row.get("payout_amount"), - payout_count: Some(row.get::<_, i32>("payout_count").unsigned_abs()), - } - } - } } #[cfg(test)] diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 76f72fa02..93d8652d1 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -29,7 +29,7 @@ pub async fn get_analytics( } let sql_query = format!( - "SELECT {} FROM analytics WHERE {} GROUP BY {} ORDER BY time DESC LIMIT {}", + "SELECT {} FROM analytics WHERE {} GROUP BY {} ORDER BY time ASC LIMIT {}", select_clause.join(","), where_clauses.join(" AND "), group_clause.join(","), @@ -41,7 +41,19 @@ pub async fn get_analytics( let stmt = client.prepare(&sql_query).await?; let rows = client.query(&stmt, params.as_slice()).await?; - let analytics: Vec = rows.iter().map(FetchedAnalytics::from).collect(); + let analytics: Vec = rows.iter().map(|row| { + // Since segment_by is a dynamic value/type it can't be passed to from<&Row> so we're building the object here + let segment_value = match &query.segment_by { + Some(segment_by) => row.try_get(&**segment_by).ok(), + None => None + }; + FetchedAnalytics { + time: row.get("time"), + payout_amount: row.try_get("payout_amount").ok(), + payout_count: row.try_get("payout_count").ok(), + segment: segment_value, + } + }).collect(); Ok(analytics) } diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 9f794a77a..52dc8f831 100644 --- 
a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -85,39 +85,54 @@ pub async fn analytics( ) .await?; - let mut count = 0; - let paid = UnifiedNum::from_u64(0); + let output = split_entries_by_timeframe(analytics, period_in_hours, &query.metric, &query.segment_by); + + Ok(success_response(serde_json::to_string(&output)?)) +} + +fn split_entries_by_timeframe(mut analytics: Vec, period_in_hours: i64, metric: &Metric, segment: &Option) -> Vec { + let mut res: Vec = vec![]; + let period_in_hours = period_in_hours as usize; + while analytics.len() > period_in_hours { + let drain_index = analytics.len() - period_in_hours; + let analytics_fraction: Vec = analytics.drain(drain_index..).collect(); + let merged_analytics = merge_analytics(analytics_fraction, metric, segment); + res.push(merged_analytics); + } + + if analytics.len() > 0 { + let merged_analytics = merge_analytics(analytics, metric, segment); + res.push(merged_analytics); + } + + res +} - // TODO: Discuss this part and potentially implement it as logic in the SQL Query - let output: FetchedAnalytics = match &query.metric { +fn merge_analytics(analytics: Vec, metric: &Metric, segment: &Option) -> FetchedAnalytics { + let mut count = 0; + let amount = UnifiedNum::from_u64(0); + match metric { Metric::Count => { - analytics.iter().for_each(|entry| { - count += entry - .payout_count - .expect("payout_count should be selected and not null") - }); + analytics.iter().for_each(|a| count += a.payout_count.unwrap()); FetchedAnalytics { + time: analytics.iter().nth(0).unwrap().time, payout_count: Some(count), payout_amount: None, + segment: segment.clone(), } - } + }, Metric::Paid => { - analytics.iter().for_each(|entry| { - paid.checked_add( - &entry - .payout_amount - .expect("payout_amount should be selected and not null"), - ) - .expect("TODO"); + analytics.iter().for_each(|a| { + amount.checked_add(&a.payout_amount.unwrap()).unwrap(); }); FetchedAnalytics { + time: analytics.iter().nth(0).unwrap().time, payout_count: None, - payout_amount: Some(paid), + payout_amount: Some(amount), + segment: segment.clone(), } } - }; - - Ok(success_response(serde_json::to_string(&output)?)) + } } // async fn cache( From 5be038af2cfd27475493edc73b1b9c511c02b8e5 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 7 Dec 2021 09:49:18 +0200 Subject: [PATCH 06/24] primitives - analytics - rename_all=camelCase --- primitives/src/analytics.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index c1a760117..4ac5f6de3 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -124,7 +124,7 @@ pub enum OperatingSystem { } #[derive(Debug, Clone, Serialize, Deserialize, Display)] -#[serde(rename_all = "lowercase")] +#[serde(rename_all = "camelCase")] pub enum Timeframe { Year, Month, @@ -132,14 +132,14 @@ pub enum Timeframe { Day, } -#[derive(Debug, Clone, Serialize, Deserialize, Display)] -#[serde(rename_all = "lowercase")] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Display)] +#[serde(rename_all = "camelCase")] pub enum Metric { Count, Paid, } -#[derive(Debug, Clone, Serialize, Deserialize, Display)] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Display)] pub enum AuthenticateAs { #[display("{0}")] Advertiser(ValidatorId), From 150b1dda630f43af7f20d17f8980039a319a2dfd Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Tue, 7 Dec 2021 16:38:08 +0200 Subject: [PATCH 07/24] improved query, small changes to logic --- 
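Notes (kept below the '---' marker so they stay out of the commit message): the per-key filters are flattened into AnalyticsQuery as Option<AnalyticsQueryKey> fields and are looked up with get_key() when the WHERE clause is built, so they arrive as ordinary query parameters. A request of the rough shape the tests later in this series exercise — host and values are placeholders for illustration, not an exact endpoint contract:

    GET /analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&country=Bulgaria&segmentBy=country
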
primitives/src/analytics.rs | 124 ++++++++++++++---------- sentry/src/db/analytics.rs | 46 +++++---- sentry/src/routes/analytics.rs | 169 ++++++++++++++++++++++----------- 3 files changed, 212 insertions(+), 127 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index c1a760117..4380da82f 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,19 +1,16 @@ -use crate::{sentry::DateHour, ValidatorId, CampaignId, IPFS, Address}; +use crate::{sentry::DateHour, Address, CampaignId, ValidatorId, IPFS}; use chrono::Utc; use parse_display::Display; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; #[cfg(feature = "postgres")] pub mod postgres { - use super::{OperatingSystem, Metric}; + use super::{AnalyticsQueryKey, Metric, OperatingSystem}; use bytes::BytesMut; use std::error::Error; - use tokio_postgres::{ - types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}, - }; + use tokio_postgres::types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; impl<'a> FromSql<'a> for OperatingSystem { fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { @@ -43,8 +40,33 @@ pub mod postgres { to_sql_checked!(); } + impl ToSql for AnalyticsQueryKey { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + match self { + Self::CampaignId(id) => id.to_string().to_sql(ty, w), + Self::AdUnit(ipfs) | Self::AdSlot(ipfs) => ipfs.to_string().to_sql(ty, w), + Self::AdSlotType(value) | Self::Hostname(value) | Self::Country(value) => { + value.to_sql(ty, w) + } + Self::Advertiser(addr) | Self::Publisher(addr) => addr.to_string().to_sql(ty, w), + Self::OperatingSystem(os_name) => os_name.to_string().to_sql(ty, w), + } + } + + accepts!(TEXT, VARCHAR); + to_sql_checked!(); + } + impl ToSql for Metric { - fn to_sql(&self, ty: &Type, w: &mut BytesMut) -> Result> { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { self.column_name().to_sql(ty, w) } @@ -69,48 +91,54 @@ pub struct AnalyticsQuery { pub end: Option>, // #[serde(default = "default_timezone")] // pub timezone: String, - pub campaign_id: Option, - pub ad_unit: Option, - pub ad_slot: Option, - pub ad_slot_type: Option, - pub advertiser: Option
, - pub publisher: Option<Address>
, - pub hostname: Option, - pub country: Option, - pub os_name: Option, + #[serde(flatten)] + pub campaign_id: Option, + #[serde(flatten)] + pub ad_unit: Option, + #[serde(flatten)] + pub ad_slot: Option, + #[serde(flatten)] + pub ad_slot_type: Option, + #[serde(flatten)] + pub advertiser: Option, + #[serde(flatten)] + pub publisher: Option, + #[serde(flatten)] + pub hostname: Option, + #[serde(flatten)] + pub country: Option, + #[serde(flatten)] + pub os_name: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum AnalyticsQueryKey { + CampaignId(CampaignId), + AdUnit(IPFS), + AdSlot(IPFS), + AdSlotType(String), + Advertiser(Address), + Publisher(Address), + Hostname(String), + Country(String), + OperatingSystem(OperatingSystem), } impl AnalyticsQuery { - pub fn available_keys(&self) -> HashMap{ - let mut keys: HashMap = HashMap::new(); - if let Some(campaign_id) = self.campaign_id { - keys.insert("campaign_id".into(), campaign_id.to_string()); - } - if let Some(ad_unit) = self.ad_unit { - keys.insert("ad_unit".into(), ad_unit.to_string()); - } - if let Some(ad_slot) = self.ad_slot { - keys.insert("ad_slot".into(), ad_slot.to_string()); - } - if let Some(ad_slot_type) = &self.ad_slot_type { - keys.insert("ad_slot_type".into(), ad_slot_type.to_string()); - } - if let Some(advertiser) = self.advertiser { - keys.insert("advertiser".into(), advertiser.to_string()); - } - if let Some(publisher) = self.publisher { - keys.insert("publisher".into(), publisher.to_string()); - } - if let Some(hostname) = &self.hostname { - keys.insert("hostname".into(), hostname.to_string()); - } - if let Some(country) = &self.country { - keys.insert("country".into(), country.to_string()); - } - if let Some(os_name) = &self.os_name { - keys.insert("os_name".into(), os_name.to_string()); + pub fn get_key(&self, key: &str) -> &Option { + match key { + "campaignId" => &self.campaign_id, + "adUnit" => &self.ad_unit, + "adSlot" => &self.ad_slot, + "adSlotType" => &self.ad_slot_type, + "advertiser" => &self.advertiser, + "publisher" => &self.publisher, + "hostname" => &self.hostname, + "country" => &self.country, + "osName" => &self.os_name, + _ => &None, } - keys } } @@ -124,7 +152,7 @@ pub enum OperatingSystem { } #[derive(Debug, Clone, Serialize, Deserialize, Display)] -#[serde(rename_all = "lowercase")] +#[serde(rename_all = "camelCase")] pub enum Timeframe { Year, Month, @@ -133,13 +161,13 @@ pub enum Timeframe { } #[derive(Debug, Clone, Serialize, Deserialize, Display)] -#[serde(rename_all = "lowercase")] +#[serde(rename_all = "camelCase")] pub enum Metric { Count, Paid, } -#[derive(Debug, Clone, Serialize, Deserialize, Display)] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Display)] pub enum AuthenticateAs { #[display("{0}")] Advertiser(ValidatorId), diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 93d8652d1..0ca951bda 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -10,14 +10,15 @@ use super::{DbPool, PoolError}; pub async fn get_analytics( pool: &DbPool, start_date: DateHour, - end_date: Option>, query: &AnalyticsQuery, + allowed_keys: Vec, auth_as: Option, limit: u32, ) -> Result, PoolError> { let client = pool.get().await?; - let (where_clauses, mut params) = analytics_query_params(&start_date, &end_date, query, &auth_as); + let (where_clauses, mut params) = + analytics_query_params(&start_date, query, &auth_as, &allowed_keys); let mut select_clause = vec!["time".to_string(), format!("${}", params.len() 
+ 1)]; params.push(&query.metric); @@ -41,43 +42,46 @@ pub async fn get_analytics( let stmt = client.prepare(&sql_query).await?; let rows = client.query(&stmt, params.as_slice()).await?; - let analytics: Vec = rows.iter().map(|row| { - // Since segment_by is a dynamic value/type it can't be passed to from<&Row> so we're building the object here - let segment_value = match &query.segment_by { - Some(segment_by) => row.try_get(&**segment_by).ok(), - None => None - }; - FetchedAnalytics { - time: row.get("time"), - payout_amount: row.try_get("payout_amount").ok(), - payout_count: row.try_get("payout_count").ok(), - segment: segment_value, - } - }).collect(); + let analytics: Vec = rows + .iter() + .map(|row| { + // Since segment_by is a dynamic value/type it can't be passed to from<&Row> so we're building the object here + let segment_value = match &query.segment_by { + Some(segment_by) => row.try_get(&**segment_by).ok(), + None => None, + }; + FetchedAnalytics { + time: row.get("time"), + payout_amount: row.try_get("payout_amount").ok(), + payout_count: row.try_get("payout_count").ok(), + segment: segment_value, + } + }) + .collect(); Ok(analytics) } fn analytics_query_params<'a>( start_date: &'a DateHour, - end_date: &'a Option>, query: &'a AnalyticsQuery, auth_as: &'a Option, + allowed_keys: &[String], ) -> (Vec, Vec<&'a (dyn ToSql + Sync)>) { let mut where_clauses: Vec = vec!["time >= $1".to_string()]; let mut params: Vec<&(dyn ToSql + Sync)> = vec![start_date]; - for (key, value) in query.available_keys() { + allowed_keys.iter().for_each(|key| { where_clauses.push(format!("{} = ${}", key, params.len() + 1)); - params.push(&value); - } + params.push(query.get_key(key)); + }); if let Some(auth_as) = auth_as { match auth_as { AuthenticateAs::Publisher(uid) => { where_clauses.push(format!("publisher = ${}", params.len() + 1)); params.push(uid); - }, + } AuthenticateAs::Advertiser(uid) => { where_clauses.push(format!("advertiser = ${}", params.len() + 1)); params.push(uid); @@ -85,7 +89,7 @@ fn analytics_query_params<'a>( } } - if let Some(end_date) = end_date { + if let Some(end_date) = &query.end { where_clauses.push(format!("time <= ${}", params.len() + 1)); params.push(end_date); } diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 52dc8f831..636e9b70e 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,10 +1,10 @@ use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; -use chrono::{Duration, Utc, Timelike}; +use chrono::{Duration, Timelike, Utc}; use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use primitives::{ adapter::Adapter, - analytics::{AnalyticsQuery, Metric, AuthenticateAs, ANALYTICS_QUERY_LIMIT}, + analytics::{AnalyticsQuery, AuthenticateAs, Metric, ANALYTICS_QUERY_LIMIT}, sentry::{DateHour, FetchedAnalytics}, UnifiedNum, }; @@ -32,32 +32,29 @@ pub async fn analytics( let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; let period_in_hours = query.timeframe.to_hours(); let start_date = match query.start { - Some(start_date) => DateHour::try_from(start_date)?, - None => DateHour::try_from(Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(period_in_hours))?, - }; - - let end_date = match query.end { - Some(end_date) => Some(DateHour::try_from(end_date)?), - None => None, + Some(start_date) => start_date, + None => DateHour::try_from( + Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(period_in_hours), + )?, 
}; let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); - let allowed_keys = match allowed_keys { - Some(keys) => keys, - None => ALLOWED_KEYS.to_vec(), + let not_allowed_keys = match &allowed_keys { + Some(keys) => ALLOWED_KEYS.iter().filter(|k| !keys.contains(k)).collect(), + None => vec![], }; if let Some(segment_by) = &query.segment_by { - if !allowed_keys.contains(segment_by) { + if not_allowed_keys.contains(&segment_by) { return Err(ResponseError::BadRequest( "Disallowed segmentBy".to_string(), )); } } - for (key, _) in query.available_keys() { - if !allowed_keys.contains(&key) { + for key in not_allowed_keys { + if query.get_key(key).is_some() { return Err(ResponseError::BadRequest(format!( "disallowed key in query: {}", key @@ -65,32 +62,41 @@ pub async fn analytics( } } - let auth = req - .extensions() - .get::(); + let auth = req.extensions().get::(); let auth_as = match (auth_as_key, auth) { (Some(auth_as_key), Some(auth)) => AuthenticateAs::try_from(&auth_as_key, auth.uid), - (Some(_), None) => return Err(ResponseError::BadRequest("auth_as_key is provided but there is no Auth object".to_string())), - _ => None + (Some(_), None) => { + return Err(ResponseError::BadRequest( + "auth_as_key is provided but there is no Auth object".to_string(), + )) + } + _ => None, }; + let allowed_keys = allowed_keys.unwrap_or_else(|| ALLOWED_KEYS.to_vec()); let analytics = get_analytics( &app.pool, start_date, - end_date, &query, + allowed_keys, auth_as, applied_limit, ) .await?; - let output = split_entries_by_timeframe(analytics, period_in_hours, &query.metric, &query.segment_by); + let output = + split_entries_by_timeframe(analytics, period_in_hours, &query.metric, &query.segment_by); Ok(success_response(serde_json::to_string(&output)?)) } -fn split_entries_by_timeframe(mut analytics: Vec, period_in_hours: i64, metric: &Metric, segment: &Option) -> Vec { +fn split_entries_by_timeframe( + mut analytics: Vec, + period_in_hours: i64, + metric: &Metric, + segment: &Option, +) -> Vec { let mut res: Vec = vec![]; let period_in_hours = period_in_hours as usize; while analytics.len() > period_in_hours { @@ -100,7 +106,7 @@ fn split_entries_by_timeframe(mut analytics: Vec, period_in_ho res.push(merged_analytics); } - if analytics.len() > 0 { + if !analytics.is_empty() { let merged_analytics = merge_analytics(analytics, metric, segment); res.push(merged_analytics); } @@ -108,25 +114,31 @@ fn split_entries_by_timeframe(mut analytics: Vec, period_in_ho res } -fn merge_analytics(analytics: Vec, metric: &Metric, segment: &Option) -> FetchedAnalytics { +fn merge_analytics( + analytics: Vec, + metric: &Metric, + segment: &Option, +) -> FetchedAnalytics { let mut count = 0; let amount = UnifiedNum::from_u64(0); match metric { Metric::Count => { - analytics.iter().for_each(|a| count += a.payout_count.unwrap()); + analytics + .iter() + .for_each(|a| count += a.payout_count.unwrap()); FetchedAnalytics { - time: analytics.iter().nth(0).unwrap().time, + time: analytics.get(0).unwrap().time, payout_count: Some(count), payout_amount: None, segment: segment.clone(), } - }, + } Metric::Paid => { analytics.iter().for_each(|a| { amount.checked_add(&a.payout_amount.unwrap()).unwrap(); }); FetchedAnalytics { - time: analytics.iter().nth(0).unwrap().time, + time: analytics.get(0).unwrap().time, payout_count: None, payout_amount: Some(amount), segment: segment.clone(), @@ -156,22 +168,19 @@ fn merge_analytics(analytics: Vec, metric: &Metric, segment: & #[cfg(test)] mod test { use super::*; - use primitives::{ - 
sentry::UpdateAnalytics, - analytics::OperatingSystem, - util::tests::prep_db::{ - DUMMY_CAMPAIGN, ADDRESSES - }, - }; - use crate:: - { - test_util::setup_dummy_app, - routes::analytics::analytics, + use crate::{ db::{ analytics::update_analytics, tests_postgres::{setup_test_migrations, DATABASE_POOL}, - DbPool - } + DbPool, + }, + routes::analytics::analytics, + test_util::setup_dummy_app, + }; + use primitives::{ + analytics::OperatingSystem, + sentry::UpdateAnalytics, + util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN}, }; async fn insert_mock_analytics(pool: &DbPool) { @@ -192,7 +201,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now).await.expect("Should update analytics"); + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); let analytics_now_different_country = UpdateAnalytics { time: DateHour::try_from(now_date).expect("should parse"), @@ -209,7 +220,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now_different_country).await.expect("Should update analytics"); + update_analytics(pool, analytics_now_different_country) + .await + .expect("Should update analytics"); let analytics_two_hours_ago = UpdateAnalytics { time: DateHour::try_from(now_date - Duration::hours(2)).expect("should parse"), @@ -226,7 +239,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_two_hours_ago).await.expect("Should update analytics"); + update_analytics(pool, analytics_two_hours_ago) + .await + .expect("Should update analytics"); let analytics_four_hours_ago = UpdateAnalytics { time: DateHour::try_from(now_date - Duration::hours(4)).expect("should parse"), @@ -243,7 +258,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_four_hours_ago).await.expect("Should update analytics"); + update_analytics(pool, analytics_four_hours_ago) + .await + .expect("Should update analytics"); let analytics_three_days_ago = UpdateAnalytics { time: DateHour::try_from(now_date - Duration::days(3)).expect("should parse"), @@ -260,7 +277,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_three_days_ago).await.expect("Should update analytics"); + update_analytics(pool, analytics_three_days_ago) + .await + .expect("Should update analytics"); // analytics from 10 days ago let analytics_ten_days_ago = UpdateAnalytics { time: DateHour::try_from(now_date - Duration::days(10)).expect("should parse"), @@ -277,7 +296,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_ten_days_ago).await.expect("Should update analytics"); + update_analytics(pool, analytics_ten_days_ago) + .await + .expect("Should update analytics"); let analytics_sixty_days_ago = UpdateAnalytics { time: DateHour::try_from(now_date - Duration::days(60)).expect("should parse"), @@ -294,7 +315,9 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_sixty_days_ago).await.expect("Should update analytics"); + update_analytics(pool, analytics_sixty_days_ago) + .await + .expect("Should update analytics"); let analytics_two_years_ago = UpdateAnalytics { time: DateHour::try_from(now_date - Duration::weeks(104)).expect("should parse"), @@ -311,7 +334,9 @@ mod test { amount_to_add: 
UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_two_years_ago).await.expect("Should update analytics"); + update_analytics(pool, analytics_two_years_ago) + .await + .expect("Should update analytics"); } #[tokio::test] @@ -327,11 +352,18 @@ mod test { // Test with no optional values let req = Request::builder() - .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") - .body(Body::empty()) - .expect("Should build Request"); + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); - let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); let json = hyper::body::to_bytes(analytics_response.into_body()) .await .expect("Should get json"); @@ -348,7 +380,14 @@ mod test { .body(Body::empty()) .expect("Should build Request"); - let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); let json = hyper::body::to_bytes(analytics_response.into_body()) .await .expect("Should get json"); @@ -364,7 +403,14 @@ mod test { .body(Body::empty()) .expect("Should build Request"); - let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); let json = hyper::body::to_bytes(analytics_response.into_body()) .await .expect("Should get json"); @@ -381,7 +427,14 @@ mod test { .body(Body::empty()) .expect("Should build Request"); - let analytics_response = analytics(req, &app, Some(vec!["country".into(), "ad_slot_type".into()]), None).await.expect("Should get analytics data"); + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); let json = hyper::body::to_bytes(analytics_response.into_body()) .await .expect("Should get json"); @@ -394,4 +447,4 @@ mod test { // test with not allowed key // test with different metric } -} \ No newline at end of file +} From 989f66d3df867f65ffad6bc7d178c4e2071c8ac0 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 7 Dec 2021 18:33:53 +0200 Subject: [PATCH 08/24] improvements on analytics --- primitives/src/analytics.rs | 30 ++++++++++++------------- primitives/src/sentry.rs | 41 +++++++++++++++++++++++++++++++--- sentry/src/db/analytics.rs | 6 +++-- sentry/src/routes/analytics.rs | 17 +++++++------- 4 files changed, 66 insertions(+), 28 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index 01491c4b3..b1d6cb036 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -47,13 +47,13 @@ pub mod postgres { w: &mut BytesMut, ) -> Result> { match self { - Self::CampaignId(id) => id.to_string().to_sql(ty, w), - Self::AdUnit(ipfs) | Self::AdSlot(ipfs) => ipfs.to_string().to_sql(ty, w), + 
Self::CampaignId(id) => id.to_sql(ty, w), + Self::AdUnit(ipfs) | Self::AdSlot(ipfs) => ipfs.to_sql(ty, w), Self::AdSlotType(value) | Self::Hostname(value) | Self::Country(value) => { value.to_sql(ty, w) } - Self::Advertiser(addr) | Self::Publisher(addr) => addr.to_string().to_sql(ty, w), - Self::OperatingSystem(os_name) => os_name.to_string().to_sql(ty, w), + Self::Advertiser(addr) | Self::Publisher(addr) => addr.to_sql(ty, w), + Self::OperatingSystem(os_name) => os_name.to_sql(ty, w), } } @@ -126,18 +126,18 @@ pub enum AnalyticsQueryKey { } impl AnalyticsQuery { - pub fn get_key(&self, key: &str) -> &Option { + pub fn get_key(&self, key: &str) -> Option<&AnalyticsQueryKey> { match key { - "campaignId" => &self.campaign_id, - "adUnit" => &self.ad_unit, - "adSlot" => &self.ad_slot, - "adSlotType" => &self.ad_slot_type, - "advertiser" => &self.advertiser, - "publisher" => &self.publisher, - "hostname" => &self.hostname, - "country" => &self.country, - "osName" => &self.os_name, - _ => &None, + "campaignId" => self.campaign_id.as_ref(), + "adUnit" => self.ad_unit.as_ref(), + "adSlot" => self.ad_slot.as_ref(), + "adSlotType" => self.ad_slot_type.as_ref(), + "advertiser" => self.advertiser.as_ref(), + "publisher" => self.publisher.as_ref(), + "hostname" => self.hostname.as_ref(), + "country" => self.country.as_ref(), + "osName" => self.os_name.as_ref(), + _ => None, } } } diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index f7e5347e9..04bbdacf1 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -1,13 +1,13 @@ use crate::{ - analytics::OperatingSystem, + analytics::{OperatingSystem, Timeframe}, balances::BalancesState, spender::Spender, validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}, Address, Balances, BigNum, CampaignId, Channel, ChannelId, UnifiedNum, ValidatorId, IPFS, }; -use chrono::{Date, DateTime, NaiveDate, TimeZone, Timelike, Utc}; +use chrono::{Date, DateTime, Duration, NaiveDate, TimeZone, Timelike, Utc}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use std::{collections::HashMap, fmt, hash::Hash}; +use std::{collections::HashMap, fmt, hash::Hash, ops::Sub}; use thiserror::Error; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] @@ -235,6 +235,14 @@ pub struct DateHour { pub hour: u32, } +impl fmt::Display for DateHour { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let datetime = self.to_datetime(); + + datetime.fmt(f) + } +} + impl DateHour { /// # Panics /// @@ -306,6 +314,33 @@ impl TryFrom> for DateHour { } } +impl Sub<&Timeframe> for DateHour { + type Output = DateHour; + + fn sub(self, rhs: &Timeframe) -> Self::Output { + let result = self.to_datetime() - Duration::hours(rhs.to_hours()); + + DateHour { + date: result.date(), + hour: result.hour(), + } + } +} + +/// Subtracts **X** hours from the [`DateHour`] +impl Sub for DateHour { + type Output = DateHour; + + fn sub(self, rhs: i64) -> Self::Output { + let result = self.to_datetime() - Duration::hours(rhs); + + DateHour { + date: result.date(), + hour: result.hour(), + } + } +} + impl Serialize for DateHour { fn serialize(&self, serializer: S) -> Result where diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 0ca951bda..f7d9772d6 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -72,8 +72,10 @@ fn analytics_query_params<'a>( let mut params: Vec<&(dyn ToSql + Sync)> = vec![start_date]; allowed_keys.iter().for_each(|key| { - where_clauses.push(format!("{} = 
${}", key, params.len() + 1)); - params.push(query.get_key(key)); + if let Some(param_value) = query.get_key(key) { + where_clauses.push(format!("{} = ${}", key, params.len() + 1)); + params.push(param_value); + } }); if let Some(auth_as) = auth_as { diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 636e9b70e..0ef6a7818 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,5 +1,4 @@ use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; -use chrono::{Duration, Timelike, Utc}; use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use primitives::{ @@ -33,9 +32,7 @@ pub async fn analytics( let period_in_hours = query.timeframe.to_hours(); let start_date = match query.start { Some(start_date) => start_date, - None => DateHour::try_from( - Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(period_in_hours), - )?, + None => DateHour::now() - &query.timeframe, }; let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); @@ -177,6 +174,7 @@ mod test { routes::analytics::analytics, test_util::setup_dummy_app, }; + use chrono::{Utc, Duration}; use primitives::{ analytics::OperatingSystem, sentry::UpdateAnalytics, @@ -374,7 +372,7 @@ mod test { assert_eq!(fetched_analytics.payout_count.unwrap(), 4); // Test with start date - let start_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(1); + let start_date = DateHour::::now() - 1; let req = Request::builder() .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}", start_date)) .body(Body::empty()) @@ -397,7 +395,7 @@ mod test { assert_eq!(fetched_analytics.payout_count.unwrap(), 2); // Test with end date - let end_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(1); + let end_date = DateHour::::now() - 1; let req = Request::builder() .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&end={}", end_date)) .body(Body::empty()) @@ -420,8 +418,11 @@ mod test { assert_eq!(fetched_analytics.payout_count.unwrap(), 3); // Test with start_date and end_date - let start_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(72); - let end_date = Utc::today().and_hms(Utc::now().hour(), 0, 0) - Duration::hours(1); + + // subtract 72 hours + let start_date = DateHour::::now() - 72; + // subtract 1 hour + let end_date = DateHour::::now() - 1; let req = Request::builder() .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}&end={}", start_date, end_date)) .body(Body::empty()) From fb6c8cfa2511a57e36ccba0c9dacac6a351d7157 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Wed, 8 Dec 2021 10:08:28 +0200 Subject: [PATCH 09/24] fix impl Display for DateHour --- primitives/src/sentry.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 04bbdacf1..2860246c7 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -235,7 +235,7 @@ pub struct DateHour { pub hour: u32, } -impl fmt::Display for DateHour { +impl fmt::Display for DateHour where Tz::Offset: fmt::Display { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let datetime = self.to_datetime(); From dc978521bee33d968bbafd8d73fa10f5edf769d3 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Wed, 8 Dec 2021 15:34:58 +0200 Subject: [PATCH 10/24] analytics query can now accept both timestamps and 
date strings --- primitives/src/analytics.rs | 36 +++++++++++++++++++++++++++++++--- primitives/src/sentry.rs | 5 ++++- sentry/src/db/analytics.rs | 11 +++++------ sentry/src/routes/analytics.rs | 18 ++++++++++++----- 4 files changed, 55 insertions(+), 15 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index b1d6cb036..f96327b4d 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -7,8 +7,9 @@ pub const ANALYTICS_QUERY_LIMIT: u32 = 200; #[cfg(feature = "postgres")] pub mod postgres { - use super::{AnalyticsQueryKey, Metric, OperatingSystem}; + use super::{AnalyticsQueryKey, AnalyticsQueryTime, Metric, OperatingSystem}; use bytes::BytesMut; + use chrono::{DateTime, NaiveDateTime, Timelike, Utc}; use std::error::Error; use tokio_postgres::types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; @@ -61,6 +62,28 @@ pub mod postgres { to_sql_checked!(); } + impl ToSql for AnalyticsQueryTime { + fn to_sql( + &self, + ty: &Type, + w: &mut BytesMut, + ) -> Result> { + match self { + Self::Date(datehour) => datehour.date.and_hms(datehour.hour, 0, 0).to_sql(ty, w), + Self::Timestamp(ts) => { + // Create a NaiveDateTime from the timestamp + let naive = NaiveDateTime::from_timestamp(0, *ts); + // Create a normal DateTime from the NaiveDateTime + let datetime: DateTime = DateTime::from_utc(naive, Utc); + datetime.date().and_hms(datetime.hour(), 0, 0).to_sql(ty, w) + } + } + } + + accepts!(TIMESTAMPTZ); + to_sql_checked!(); + } + impl ToSql for Metric { fn to_sql( &self, @@ -87,8 +110,8 @@ pub struct AnalyticsQuery { #[serde(default = "default_timeframe")] pub timeframe: Timeframe, pub segment_by: Option, - pub start: Option>, - pub end: Option>, + pub start: Option, + pub end: Option, // #[serde(default = "default_timezone")] // pub timezone: String, #[serde(flatten)] @@ -111,6 +134,13 @@ pub struct AnalyticsQuery { pub os_name: Option, } +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum AnalyticsQueryTime { + Date(DateHour), + Timestamp(u32), +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum AnalyticsQueryKey { diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 2860246c7..39d7f7859 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -235,7 +235,10 @@ pub struct DateHour { pub hour: u32, } -impl fmt::Display for DateHour where Tz::Offset: fmt::Display { +impl fmt::Display for DateHour +where + Tz::Offset: fmt::Display, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let datetime = self.to_datetime(); diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index f7d9772d6..587c966a2 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -1,7 +1,6 @@ -use chrono::Utc; use primitives::{ - analytics::{AnalyticsQuery, AuthenticateAs}, - sentry::{Analytics, DateHour, FetchedAnalytics, UpdateAnalytics}, + analytics::{AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs}, + sentry::{Analytics, FetchedAnalytics, UpdateAnalytics}, }; use tokio_postgres::types::ToSql; @@ -9,7 +8,7 @@ use super::{DbPool, PoolError}; pub async fn get_analytics( pool: &DbPool, - start_date: DateHour, + start_date: &AnalyticsQueryTime, query: &AnalyticsQuery, allowed_keys: Vec, auth_as: Option, @@ -18,7 +17,7 @@ pub async fn get_analytics( let client = pool.get().await?; let (where_clauses, mut params) = - analytics_query_params(&start_date, query, &auth_as, &allowed_keys); + 
analytics_query_params(start_date, query, &auth_as, &allowed_keys); let mut select_clause = vec!["time".to_string(), format!("${}", params.len() + 1)]; params.push(&query.metric); @@ -63,7 +62,7 @@ pub async fn get_analytics( } fn analytics_query_params<'a>( - start_date: &'a DateHour, + start_date: &'a AnalyticsQueryTime, query: &'a AnalyticsQuery, auth_as: &'a Option, allowed_keys: &[String], diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 0ef6a7818..7f94d0e2b 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,9 +1,12 @@ use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; +use chrono::{Duration, Timelike, Utc}; use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use primitives::{ adapter::Adapter, - analytics::{AnalyticsQuery, AuthenticateAs, Metric, ANALYTICS_QUERY_LIMIT}, + analytics::{ + AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs, Metric, ANALYTICS_QUERY_LIMIT, + }, sentry::{DateHour, FetchedAnalytics}, UnifiedNum, }; @@ -31,8 +34,13 @@ pub async fn analytics( let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; let period_in_hours = query.timeframe.to_hours(); let start_date = match query.start { - Some(start_date) => start_date, - None => DateHour::now() - &query.timeframe, + Some(ref start_date) => start_date.to_owned(), + None => { + let datetime = Utc::now() - Duration::hours(period_in_hours); + let datehour = + DateHour::try_from(datetime.date().and_hms(datetime.hour(), 0, 0)).unwrap(); + AnalyticsQueryTime::Date(datehour) + } }; let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); @@ -74,7 +82,7 @@ pub async fn analytics( let allowed_keys = allowed_keys.unwrap_or_else(|| ALLOWED_KEYS.to_vec()); let analytics = get_analytics( &app.pool, - start_date, + &start_date, &query, allowed_keys, auth_as, @@ -174,7 +182,7 @@ mod test { routes::analytics::analytics, test_util::setup_dummy_app, }; - use chrono::{Utc, Duration}; + use chrono::{Duration, Utc}; use primitives::{ analytics::OperatingSystem, sentry::UpdateAnalytics, From 526b7085ea10847e2209bd0b981461da613fc2b9 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Thu, 9 Dec 2021 21:46:54 +0200 Subject: [PATCH 11/24] tests almost ready + changes to logic --- primitives/src/analytics.rs | 46 +++++--- primitives/src/sentry.rs | 2 +- sentry/src/db/analytics.rs | 24 ++-- sentry/src/routes/analytics.rs | 196 ++++++++++++++++++++++++++++----- 4 files changed, 216 insertions(+), 52 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index f96327b4d..7d079bdc9 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,13 +1,13 @@ use crate::{sentry::DateHour, Address, CampaignId, ValidatorId, IPFS}; -use chrono::Utc; +use chrono::{DateTime, NaiveDateTime, Utc}; use parse_display::Display; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; #[cfg(feature = "postgres")] pub mod postgres { - use super::{AnalyticsQueryKey, AnalyticsQueryTime, Metric, OperatingSystem}; + use super::{AnalyticsQueryKey, AnalyticsQueryTime, OperatingSystem}; use bytes::BytesMut; use chrono::{DateTime, NaiveDateTime, Timelike, Utc}; use std::error::Error; @@ -83,19 +83,6 @@ pub mod postgres { accepts!(TIMESTAMPTZ); to_sql_checked!(); } - - impl ToSql for Metric { - fn to_sql( - &self, - ty: &Type, - w: &mut BytesMut, - ) -> Result> { - self.column_name().to_sql(ty, w) - 
} - - accepts!(TEXT, VARCHAR); - to_sql_checked!(); - } } #[derive(Debug, Serialize, Deserialize)] @@ -110,7 +97,11 @@ pub struct AnalyticsQuery { #[serde(default = "default_timeframe")] pub timeframe: Timeframe, pub segment_by: Option, + #[serde(default)] + #[serde(deserialize_with = "deserialize_query_time")] pub start: Option, + #[serde(default)] + #[serde(deserialize_with = "deserialize_query_time")] pub end: Option, // #[serde(default = "default_timezone")] // pub timezone: String, @@ -217,7 +208,7 @@ impl AuthenticateAs { } impl Metric { - pub fn column_name(&self) -> String { + pub fn column_name(self) -> String { match self { Metric::Count => "payout_count".to_string(), Metric::Paid => "payout_amount".to_string(), @@ -335,6 +326,27 @@ fn default_timeframe() -> Timeframe { Timeframe::Day } +fn deserialize_query_time<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let date_as_str = String::deserialize(deserializer)?; + let naive = NaiveDateTime::parse_from_str(&date_as_str, "%Y-%m-%dT%H:%M:%SZ"); + match naive { + Ok(naive) => { + let datetime: DateTime = DateTime::from_utc(naive, Utc); + let dh = DateHour::try_from(datetime).map_err(serde::de::Error::custom)?; + Ok(Some(AnalyticsQueryTime::Date(dh))) + } + _ => { + let timestamp = date_as_str + .parse::() + .map_err(serde::de::Error::custom)?; + Ok(Some(AnalyticsQueryTime::Timestamp(timestamp))) + } + } +} + // fn default_timezone() -> String { // "UTC".into() // } diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 39d7f7859..9d23e1666 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -210,7 +210,7 @@ pub struct FetchedAnalytics { // time is represented as a timestamp pub time: i64, pub payout_amount: Option, - pub payout_count: Option, + pub payout_count: Option, // We can't know the exact segment type but it can always be represented as a string pub segment: Option, } diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 587c966a2..1d5cc6087 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -1,5 +1,6 @@ +use chrono::{DateTime, Utc}; use primitives::{ - analytics::{AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs}, + analytics::{AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs, Metric}, sentry::{Analytics, FetchedAnalytics, UpdateAnalytics}, }; use tokio_postgres::types::ToSql; @@ -16,11 +17,14 @@ pub async fn get_analytics( ) -> Result, PoolError> { let client = pool.get().await?; - let (where_clauses, mut params) = + let (where_clauses, params) = analytics_query_params(start_date, query, &auth_as, &allowed_keys); - let mut select_clause = vec!["time".to_string(), format!("${}", params.len() + 1)]; - params.push(&query.metric); + let mut select_clause = vec!["time".to_string()]; + match &query.metric { + Metric::Paid => select_clause.push("payout_amount".to_string()), + Metric::Count => select_clause.push("payout_count".to_string()), + } let mut group_clause = vec!["time".to_string()]; if let Some(segment_by) = &query.segment_by { @@ -28,15 +32,15 @@ pub async fn get_analytics( group_clause.push(segment_by.to_string()); } + // TODO: Is a GROUP BY clause really needed here? 
let sql_query = format!( - "SELECT {} FROM analytics WHERE {} GROUP BY {} ORDER BY time ASC LIMIT {}", - select_clause.join(","), + "SELECT {} FROM analytics WHERE {} ORDER BY time ASC LIMIT {}", + select_clause.join(", "), where_clauses.join(" AND "), - group_clause.join(","), + // group_clause.join(", "), limit, ); - println!("{}", sql_query); // execute query let stmt = client.prepare(&sql_query).await?; let rows = client.query(&stmt, params.as_slice()).await?; @@ -49,8 +53,10 @@ pub async fn get_analytics( Some(segment_by) => row.try_get(&**segment_by).ok(), None => None, }; + let time = row.get::<_, DateTime>("time"); + FetchedAnalytics { - time: row.get("time"), + time: time.timestamp(), payout_amount: row.try_get("payout_amount").ok(), payout_count: row.try_get("payout_count").ok(), segment: segment_value, diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 7f94d0e2b..cb5ca4fea 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -52,8 +52,14 @@ pub async fn analytics( if let Some(segment_by) = &query.segment_by { if not_allowed_keys.contains(&segment_by) { + return Err(ResponseError::BadRequest(format!( + "Disallowed segmentBy: {}", + segment_by + ))); + } + if query.get_key(segment_by).is_none() { return Err(ResponseError::BadRequest( - "Disallowed segmentBy".to_string(), + "SegmentBy is provided but a key is not passed".to_string(), )); } } @@ -104,6 +110,8 @@ fn split_entries_by_timeframe( ) -> Vec { let mut res: Vec = vec![]; let period_in_hours = period_in_hours as usize; + // TODO: If there is an hour with no events this logic will fail + // FIX BEFORE MERGE! while analytics.len() > period_in_hours { let drain_index = analytics.len() - period_in_hours; let analytics_fraction: Vec = analytics.drain(drain_index..).collect(); @@ -115,7 +123,6 @@ fn split_entries_by_timeframe( let merged_analytics = merge_analytics(analytics, metric, segment); res.push(merged_analytics); } - res } @@ -174,11 +181,7 @@ fn merge_analytics( mod test { use super::*; use crate::{ - db::{ - analytics::update_analytics, - tests_postgres::{setup_test_migrations, DATABASE_POOL}, - DbPool, - }, + db::{analytics::update_analytics, DbPool}, routes::analytics::analytics, test_util::setup_dummy_app, }; @@ -347,14 +350,9 @@ mod test { #[tokio::test] async fn test_analytics_route_for_guest() { - let database = DATABASE_POOL.get().await.expect("Should get a DB pool"); let app = setup_dummy_app().await; - setup_test_migrations(database.pool.clone()) - .await - .expect("Migrations should succeed"); - - insert_mock_analytics(&database.pool).await; + insert_mock_analytics(&app.pool).await; // Test with no optional values let req = Request::builder() @@ -374,13 +372,21 @@ mod test { .await .expect("Should get json"); - let fetched_analytics: FetchedAnalytics = + let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); - assert!(fetched_analytics.payout_count.is_some()); - assert_eq!(fetched_analytics.payout_count.unwrap(), 4); - // Test with start date + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); + // Test with start date let start_date = DateHour::::now() - 1; + // let query = format!( + // "?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}", + // start_date + // ); + // let query_str = + // serde_urlencoded::from_str::(&query) + // .expect("should encode"); let 
req = Request::builder() .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}", start_date)) .body(Body::empty()) @@ -398,9 +404,11 @@ mod test { .await .expect("Should get json"); - let fetched_analytics: FetchedAnalytics = + let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); - assert_eq!(fetched_analytics.payout_count.unwrap(), 2); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); // Test with end date let end_date = DateHour::::now() - 1; @@ -421,12 +429,13 @@ mod test { .await .expect("Should get json"); - let fetched_analytics: FetchedAnalytics = + let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); - assert_eq!(fetched_analytics.payout_count.unwrap(), 3); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); // Test with start_date and end_date - // subtract 72 hours let start_date = DateHour::::now() - 72; // subtract 1 hour @@ -448,12 +457,149 @@ mod test { .await .expect("Should get json"); - let fetched_analytics: FetchedAnalytics = + let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); - assert_eq!(fetched_analytics.payout_count.unwrap(), 2); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); + // Test with segment_by - // test with not allowed segment by + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segment_by=country&country=Bulgaria") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); + + // Test with not allowed segment by + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segment_by=campaignId&campaignId=0x936da01f9abd4d9d80c702af85c822a8") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await; + + let err_msg = "Disallowed segmentBy: campaignId".to_string(); + assert!(matches!( + analytics_response, + Err(ResponseError::BadRequest(err_msg)) + )); + // test with not allowed key + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&campaignId=0x936da01f9abd4d9d80c702af85c822a8") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await; + + let err_msg = "disallowed 
key in query: campaignId".to_string(); + assert!(matches!( + analytics_response, + Err(ResponseError::BadRequest(err_msg)) + )); + + // test with not segmentBy which is then not provided + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segmentBy=country") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await; + + let err_msg = "SegmentBy is provided but a key is not passed".to_string(); + assert!(matches!( + analytics_response, + Err(ResponseError::BadRequest(err_msg)) + )); + // test with different metric + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=paid&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_amount.is_some()); + assert_eq!( + fetched_analytics.get(0).unwrap().payout_amount.unwrap(), + UnifiedNum::from_u64(4_000_000) + ); + + // Test with different timeframe + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=week") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); } + // test for publisher + // test for advertiser + // test for admin + // test for admin with all optional keys } From 381533dcd55a838b5fa7db6f7e89253aa75ffe2b Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Fri, 10 Dec 2021 19:54:53 +0200 Subject: [PATCH 12/24] Additional tests --- sentry/src/routes/analytics.rs | 388 ++++++++++++++++++++++++++++++--- 1 file changed, 356 insertions(+), 32 deletions(-) diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index cb5ca4fea..3e8f3a67c 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -1,5 +1,4 @@ use crate::{db::analytics::get_analytics, success_response, Application, Auth, ResponseError}; -use chrono::{Duration, Timelike, Utc}; use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use primitives::{ @@ -35,12 +34,7 @@ pub async fn analytics( let period_in_hours = query.timeframe.to_hours(); let start_date = match query.start { Some(ref start_date) => start_date.to_owned(), - None => { - let datetime = Utc::now() - Duration::hours(period_in_hours); - let datehour = - DateHour::try_from(datetime.date().and_hms(datetime.hour(), 0, 0)).unwrap(); - AnalyticsQueryTime::Date(datehour) - } + None => 
AnalyticsQueryTime::Date(DateHour::now() - period_in_hours), }; let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); @@ -184,19 +178,20 @@ mod test { db::{analytics::update_analytics, DbPool}, routes::analytics::analytics, test_util::setup_dummy_app, + ValidatorId, }; - use chrono::{Duration, Utc}; + use chrono::Utc; use primitives::{ - analytics::OperatingSystem, + analytics::{OperatingSystem, Timeframe}, sentry::UpdateAnalytics, - util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN}, + util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_IPFS}, }; async fn insert_mock_analytics(pool: &DbPool) { // analytics for NOW - let now_date = Utc::today().and_hms(1, 0, 0); + let now_date = DateHour::try_from(Utc::today().and_hms(1, 0, 0)).expect("should parse"); let analytics_now = UpdateAnalytics { - time: DateHour::try_from(now_date).expect("should parse"), + time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -215,7 +210,7 @@ mod test { .expect("Should update analytics"); let analytics_now_different_country = UpdateAnalytics { - time: DateHour::try_from(now_date).expect("should parse"), + time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -234,7 +229,7 @@ mod test { .expect("Should update analytics"); let analytics_two_hours_ago = UpdateAnalytics { - time: DateHour::try_from(now_date - Duration::hours(2)).expect("should parse"), + time: now_date - 2, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -253,7 +248,7 @@ mod test { .expect("Should update analytics"); let analytics_four_hours_ago = UpdateAnalytics { - time: DateHour::try_from(now_date - Duration::hours(4)).expect("should parse"), + time: now_date - 4, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -272,7 +267,7 @@ mod test { .expect("Should update analytics"); let analytics_three_days_ago = UpdateAnalytics { - time: DateHour::try_from(now_date - Duration::days(3)).expect("should parse"), + time: now_date - (24 * 3), campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -291,7 +286,7 @@ mod test { .expect("Should update analytics"); // analytics from 10 days ago let analytics_ten_days_ago = UpdateAnalytics { - time: DateHour::try_from(now_date - Duration::days(10)).expect("should parse"), + time: now_date - (24 * 10), campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -310,7 +305,7 @@ mod test { .expect("Should update analytics"); let analytics_sixty_days_ago = UpdateAnalytics { - time: DateHour::try_from(now_date - Duration::days(60)).expect("should parse"), + time: now_date - (24 * 60), campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -329,7 +324,7 @@ mod test { .expect("Should update analytics"); let analytics_two_years_ago = UpdateAnalytics { - time: DateHour::try_from(now_date - Duration::weeks(104)).expect("should parse"), + time: now_date - (24 * 7 * 104), campaign_id: DUMMY_CAMPAIGN.id, ad_unit: None, ad_slot: None, @@ -380,15 +375,28 @@ mod test { // Test with start date let start_date = DateHour::::now() - 1; - // let query = format!( - // "?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}", - // start_date - // ); - // let query_str = - // serde_urlencoded::from_str::(&query) - // .expect("should encode"); + + let query = AnalyticsQuery { + limit: 1000, + event_type: "CLICK".into(), + metric: Metric::Count, + timeframe: Timeframe::Day, + segment_by: None, + start: Some(AnalyticsQueryTime::Date(start_date)), + end: None, + campaign_id: None, + ad_unit: None, + ad_slot: None, 
+ ad_slot_type: None, + advertiser: None, + publisher: None, + hostname: None, + country: None, + os_name: None + }; + let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() - .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}", start_date)) + .extension(query) .body(Body::empty()) .expect("Should build Request"); @@ -527,7 +535,7 @@ mod test { Err(ResponseError::BadRequest(err_msg)) )); - // test with not segmentBy which is then not provided + // test with segmentBy which is then not provided let req = Request::builder() .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segmentBy=country") .body(Body::empty()) @@ -597,9 +605,325 @@ mod test { assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); + + // Test with a limit + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=2&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); + // Test with a month timeframe + let req = Request::builder() + .uri( + "http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=month", + ) + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 6); + // Test with a year timeframe + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=year") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 7); + + // Test with start and end as timestamps + } + + async fn insert_mock_analytics_for_auth_routes(pool: &DbPool) { + // Analytics with publisher and advertiser + let now_date = DateHour::try_from(Utc::today().and_hms(1, 0, 0)).expect("should 
parse"); + let analytics_now = UpdateAnalytics { + time: now_date, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); + // Analytics with a different unit/slot + let analytics_now = UpdateAnalytics { + time: now_date, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[2]), + ad_slot: Some(DUMMY_IPFS[3]), + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); + // Analytics with a different event type + let analytics_now = UpdateAnalytics { + time: now_date, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "IMPRESSION".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); + // Analytics with no None fields + let analytics_now = UpdateAnalytics { + time: now_date - 2, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: Some("TEST_TYPE".to_string()), + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher"], + hostname: Some("localhost".to_string()), + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); + // Analytics with different publisher + let analytics_now = UpdateAnalytics { + time: now_date, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: None, + advertiser: ADDRESSES["creator"], + publisher: ADDRESSES["publisher2"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); + // Analytics with different advertiser + let analytics_now = UpdateAnalytics { + time: now_date, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: None, + advertiser: ADDRESSES["tester"], + publisher: ADDRESSES["publisher"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + 
.expect("Should update analytics"); + // Analytics with both a different publisher and advertiser + let analytics_now = UpdateAnalytics { + time: now_date, + campaign_id: DUMMY_CAMPAIGN.id, + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: None, + advertiser: ADDRESSES["tester"], + publisher: ADDRESSES["publisher2"], + hostname: None, + country: Some("Bulgaria".to_string()), + os_name: OperatingSystem::map_os("Windows"), + event_type: "CLICK".to_string(), + amount_to_add: UnifiedNum::from_u64(1_000_000), + count_to_add: 1, + }; + update_analytics(pool, analytics_now) + .await + .expect("Should update analytics"); + } + #[tokio::test] + async fn test_analytics_route_with_auth() { + let app = setup_dummy_app().await; + insert_mock_analytics_for_auth_routes(&app.pool).await; + + let publisher_auth = Auth { + era: 0, + uid: ValidatorId::from(ADDRESSES["publisher"]), + }; + let advertiser_auth = Auth { + era: 0, + uid: ValidatorId::from(ADDRESSES["advertiser"]), + }; + let admin_auth = Auth { + era: 0, + uid: ValidatorId::try_from("0xce07CbB7e054514D590a0262C93070D838bFBA2e") + .expect("should create"), + }; + // test for publisher + let req = Request::builder() + .extension(publisher_auth.clone()) + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, None, Some("publisher".to_string())) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); + // test for advertiser + let req = Request::builder() + .extension(advertiser_auth) + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, None, Some("advertiser".to_string())) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); + // test for admin + let req = Request::builder() + .extension(admin_auth.clone()) + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, None, None) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 7); + // test for admin with all optional keys + let start_date = DateHour::::now() - 72; + let end_date = DateHour::::now() - 
1; + let req = Request::builder() + .extension(admin_auth) + .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segmentBy=campaignId&start={}&end={}&campaignId={}&adUnit={}&adSlot={}&adSlotType=TEST_TYPE&advertiser={}&publisher={}&hostname=localhost&country=Bulgaria&osName=Windows", start_date, end_date, DUMMY_CAMPAIGN.id, DUMMY_IPFS[0], DUMMY_IPFS[1], ADDRESSES["creator"], ADDRESSES["publisher"])) + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, None, None) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 1); + // test with no authUid + let req = Request::builder() + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + .body(Body::empty()) + .expect("Should build Request"); + + let analytics_response = analytics(req, &app, None, Some("publisher".to_string())).await; + let err_msg = "auth_as_key is provided but there is no Auth object".to_string(); + assert!(matches!( + analytics_response, + Err(ResponseError::BadRequest(err_msg)) + )); } - // test for publisher - // test for advertiser - // test for admin - // test for admin with all optional keys } From 743c1c33811f10e091a3e4b1f1e86b518817802c Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Mon, 13 Dec 2021 20:35:29 +0200 Subject: [PATCH 13/24] test progress --- primitives/src/analytics.rs | 9 ++-- sentry/src/routes/analytics.rs | 80 +++++++++++++++++++++++++++++----- 2 files changed, 74 insertions(+), 15 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index 7d079bdc9..41b6e0e85 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,7 +1,8 @@ use crate::{sentry::DateHour, Address, CampaignId, ValidatorId, IPFS}; use chrono::{DateTime, NaiveDateTime, Utc}; use parse_display::Display; -use serde::{Deserialize, Deserializer, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use tokio_postgres::types::ToSql; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; @@ -69,7 +70,7 @@ pub mod postgres { w: &mut BytesMut, ) -> Result> { match self { - Self::Date(datehour) => datehour.date.and_hms(datehour.hour, 0, 0).to_sql(ty, w), + Self::Date(datehour) => datehour.to_sql(ty, w), Self::Timestamp(ts) => { // Create a NaiveDateTime from the timestamp let naive = NaiveDateTime::from_timestamp(0, *ts); @@ -126,7 +127,7 @@ pub struct AnalyticsQuery { } #[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] +#[serde(untagged, rename_all = "camelCase")] pub enum AnalyticsQueryTime { Date(DateHour), Timestamp(u32), @@ -147,7 +148,7 @@ pub enum AnalyticsQueryKey { } impl AnalyticsQuery { - pub fn get_key(&self, key: &str) -> Option<&AnalyticsQueryKey> { + pub fn get_key(&self, key: &str) -> Option<&T> { match key { "campaignId" => self.campaign_id.as_ref(), "adUnit" => self.ad_unit.as_ref(), diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 3e8f3a67c..b3089d2b3 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -51,6 +51,8 @@ pub async fn analytics( segment_by 
))); } + println!("{}", segment_by); + println!("{:?}", query.country); if query.get_key(segment_by).is_none() { return Err(ResponseError::BadRequest( "SegmentBy is provided but a key is not passed".to_string(), @@ -180,7 +182,7 @@ mod test { test_util::setup_dummy_app, ValidatorId, }; - use chrono::Utc; + use chrono::{Utc, Timelike}; use primitives::{ analytics::{OperatingSystem, Timeframe}, sentry::UpdateAnalytics, @@ -189,7 +191,7 @@ mod test { async fn insert_mock_analytics(pool: &DbPool) { // analytics for NOW - let now_date = DateHour::try_from(Utc::today().and_hms(1, 0, 0)).expect("should parse"); + let now_date = DateHour::try_from(Utc::today().and_hms(Utc::now().hour(), 0, 0)).expect("should parse"); let analytics_now = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, @@ -396,7 +398,7 @@ mod test { }; let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() - .extension(query) + .uri(format!("http://127.0.0.1/analytics?{}", query)) .body(Body::empty()) .expect("Should build Request"); @@ -420,8 +422,27 @@ mod test { // Test with end date let end_date = DateHour::::now() - 1; + let query = AnalyticsQuery { + limit: 1000, + event_type: "CLICK".into(), + metric: Metric::Count, + timeframe: Timeframe::Day, + segment_by: None, + start: None, + end: Some(AnalyticsQueryTime::Date(end_date)), + campaign_id: None, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: None, + publisher: None, + hostname: None, + country: None, + os_name: None + }; + let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() - .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&end={}", end_date)) + .uri(format!("http://127.0.0.1/analytics?{}", query)) .body(Body::empty()) .expect("Should build Request"); @@ -441,18 +462,36 @@ mod test { serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); // Test with start_date and end_date - // subtract 72 hours + // subtract 72 hours, there is an event exactly 72 hours ago so this also tests GTE let start_date = DateHour::::now() - 72; // subtract 1 hour let end_date = DateHour::::now() - 1; + let query = AnalyticsQuery { + limit: 1000, + event_type: "CLICK".into(), + metric: Metric::Count, + timeframe: Timeframe::Day, + segment_by: None, + start: Some(AnalyticsQueryTime::Date(start_date)), + end: Some(AnalyticsQueryTime::Date(end_date)), + campaign_id: None, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: None, + publisher: None, + hostname: None, + country: None, + os_name: None + }; + let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() - .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&start={}&end={}", start_date, end_date)) + .uri(format!("http://127.0.0.1/analytics?{}", query)) .body(Body::empty()) .expect("Should build Request"); - let analytics_response = analytics( req, &app, @@ -469,11 +508,30 @@ mod test { serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - 
assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); // Test with segment_by + let query = AnalyticsQuery { + limit: 1000, + event_type: "CLICK".into(), + metric: Metric::Count, + timeframe: Timeframe::Day, + segment_by: Some("country".into()), + start: None, + end: Some(AnalyticsQueryTime::Date(end_date)), + campaign_id: None, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: None, + publisher: None, + hostname: None, + country: None, + os_name: None + }; + let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() - .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segment_by=country&country=Bulgaria") + .uri(format!("http://127.0.0.1/analytics?{}", query)) .body(Body::empty()) .expect("Should build Request"); @@ -497,7 +555,7 @@ mod test { // Test with not allowed segment by let req = Request::builder() - .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segment_by=campaignId&campaignId=0x936da01f9abd4d9d80c702af85c822a8") + .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segmentBy=campaignId&campaignId=0x936da01f9abd4d9d80c702af85c822a8") .body(Body::empty()) .expect("Should build Request"); From e11962f2a3599cf87187def07a7c90b822ae6563 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Tue, 14 Dec 2021 18:27:18 +0200 Subject: [PATCH 14/24] fixed failing tests and logic + added myself as author --- primitives/Cargo.toml | 1 + primitives/src/analytics.rs | 34 +++------- sentry/Cargo.toml | 1 + sentry/src/db/analytics.rs | 6 +- sentry/src/lib.rs | 2 +- sentry/src/routes/analytics.rs | 120 ++++++++++++++++++++++----------- validator_worker/Cargo.toml | 1 + 7 files changed, 95 insertions(+), 70 deletions(-) diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index d6962ffb2..633b9a325 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -5,6 +5,7 @@ authors = [ "Ambire ", "Lachezar Lechev ", "Omidiora Samuel ", + "Simeon Nakov ", ] edition = "2021" diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index 41b6e0e85..e2fd7117c 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,8 +1,7 @@ use crate::{sentry::DateHour, Address, CampaignId, ValidatorId, IPFS}; use chrono::{DateTime, NaiveDateTime, Utc}; use parse_display::Display; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use tokio_postgres::types::ToSql; +use serde::{Deserialize, Deserializer, Serialize}; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; @@ -50,11 +49,9 @@ pub mod postgres { ) -> Result> { match self { Self::CampaignId(id) => id.to_sql(ty, w), - Self::AdUnit(ipfs) | Self::AdSlot(ipfs) => ipfs.to_sql(ty, w), - Self::AdSlotType(value) | Self::Hostname(value) | Self::Country(value) => { - value.to_sql(ty, w) - } - Self::Advertiser(addr) | Self::Publisher(addr) => addr.to_sql(ty, w), + Self::IPFS(ipfs) => ipfs.to_sql(ty, w), + Self::String(value) => value.to_sql(ty, w), + Self::Address(addr) => addr.to_sql(ty, w), Self::OperatingSystem(os_name) => os_name.to_sql(ty, w), } } @@ -106,23 +103,14 @@ pub struct AnalyticsQuery { pub end: Option, // #[serde(default = "default_timezone")] // pub timezone: String, - #[serde(flatten)] pub campaign_id: Option, - #[serde(flatten)] pub ad_unit: Option, - #[serde(flatten)] pub ad_slot: Option, - #[serde(flatten)] pub ad_slot_type: Option, - 
#[serde(flatten)] pub advertiser: Option, - #[serde(flatten)] pub publisher: Option, - #[serde(flatten)] pub hostname: Option, - #[serde(flatten)] pub country: Option, - #[serde(flatten)] pub os_name: Option, } @@ -134,21 +122,17 @@ pub enum AnalyticsQueryTime { } #[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] +#[serde(untagged, rename_all = "camelCase")] pub enum AnalyticsQueryKey { CampaignId(CampaignId), - AdUnit(IPFS), - AdSlot(IPFS), - AdSlotType(String), - Advertiser(Address), - Publisher(Address), - Hostname(String), - Country(String), + IPFS(IPFS), + String(String), + Address(Address), OperatingSystem(OperatingSystem), } impl AnalyticsQuery { - pub fn get_key(&self, key: &str) -> Option<&T> { + pub fn get_key(&self, key: &str) -> Option<&AnalyticsQueryKey> { match key { "campaignId" => self.campaign_id.as_ref(), "adUnit" => self.ad_unit.as_ref(), diff --git a/sentry/Cargo.toml b/sentry/Cargo.toml index 7ed5bc51b..7b7e40b16 100644 --- a/sentry/Cargo.toml +++ b/sentry/Cargo.toml @@ -5,6 +5,7 @@ authors = [ "Ambire ", "Lachezar Lechev ", "Omidiora Samuel ", + "Simeon Nakov ", ] edition = "2021" diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 1d5cc6087..32cc86601 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -11,7 +11,7 @@ pub async fn get_analytics( pool: &DbPool, start_date: &AnalyticsQueryTime, query: &AnalyticsQuery, - allowed_keys: Vec, + allowed_keys: Vec<&str>, auth_as: Option, limit: u32, ) -> Result, PoolError> { @@ -32,7 +32,7 @@ pub async fn get_analytics( group_clause.push(segment_by.to_string()); } - // TODO: Is a GROUP BY clause really needed here? + // TODO: Is a GROUP BY clause really needed here as we call merge_analytics() later and get the same output let sql_query = format!( "SELECT {} FROM analytics WHERE {} ORDER BY time ASC LIMIT {}", select_clause.join(", "), @@ -71,7 +71,7 @@ fn analytics_query_params<'a>( start_date: &'a AnalyticsQueryTime, query: &'a AnalyticsQuery, auth_as: &'a Option, - allowed_keys: &[String], + allowed_keys: &[&str], ) -> (Vec, Vec<&'a (dyn ToSql + Sync)>) { let mut where_clauses: Vec = vec!["time >= $1".to_string()]; let mut params: Vec<&(dyn ToSql + Sync)> = vec![start_date]; diff --git a/sentry/src/lib.rs b/sentry/src/lib.rs index 8ee174b46..2b3a495e9 100644 --- a/sentry/src/lib.rs +++ b/sentry/src/lib.rs @@ -227,7 +227,7 @@ async fn analytics_router( analytics( req, app, - Some(vec!["country".to_string(), "ad_slot_type".to_string()]), + Some(vec!["country".to_string(), "adSlotType".to_string()]), None, ) .await diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index b3089d2b3..40ac815c4 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -51,8 +51,6 @@ pub async fn analytics( segment_by ))); } - println!("{}", segment_by); - println!("{:?}", query.country); if query.get_key(segment_by).is_none() { return Err(ResponseError::BadRequest( "SegmentBy is provided but a key is not passed".to_string(), @@ -81,7 +79,22 @@ pub async fn analytics( _ => None, }; - let allowed_keys = allowed_keys.unwrap_or_else(|| ALLOWED_KEYS.to_vec()); + let allowed_keys: Vec<&str> = allowed_keys + .unwrap_or_else(|| ALLOWED_KEYS.to_vec()) + .iter() + .map(|k| match k.as_ref() { + "campaignId" => "campaign_id", + "adUnit" => "ad_unit", + "adSlot" => "ad_slot", + "adSlotType" => "ad_slot_type", + "advertiser" => "advertiser", + "publisher" => "publisher", + "hostname" => "hostname", + "osName" => "os_name", + _ => 
"country", + }) + .collect(); + let analytics = get_analytics( &app.pool, &start_date, @@ -127,10 +140,9 @@ fn merge_analytics( metric: &Metric, segment: &Option, ) -> FetchedAnalytics { - let mut count = 0; - let amount = UnifiedNum::from_u64(0); match metric { Metric::Count => { + let mut count = 0; analytics .iter() .for_each(|a| count += a.payout_count.unwrap()); @@ -142,8 +154,12 @@ fn merge_analytics( } } Metric::Paid => { + let mut amount = UnifiedNum::from_u64(0); analytics.iter().for_each(|a| { - amount.checked_add(&a.payout_amount.unwrap()).unwrap(); + let amount_to_add = a.payout_amount.unwrap(); + amount = amount + .checked_add(&amount_to_add) + .expect("TODO: Use Result to handle possible overflows"); }); FetchedAnalytics { time: analytics.get(0).unwrap().time, @@ -182,16 +198,17 @@ mod test { test_util::setup_dummy_app, ValidatorId, }; - use chrono::{Utc, Timelike}; + use chrono::{Timelike, Utc}; use primitives::{ - analytics::{OperatingSystem, Timeframe}, + analytics::{AnalyticsQueryKey, OperatingSystem, Timeframe}, sentry::UpdateAnalytics, util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_IPFS}, }; async fn insert_mock_analytics(pool: &DbPool) { // analytics for NOW - let now_date = DateHour::try_from(Utc::today().and_hms(Utc::now().hour(), 0, 0)).expect("should parse"); + let now_date = DateHour::try_from(Utc::today().and_hms(Utc::now().hour(), 0, 0)) + .expect("should parse"); let analytics_now = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, @@ -378,7 +395,7 @@ mod test { // Test with start date let start_date = DateHour::::now() - 1; - let query = AnalyticsQuery { + let query = AnalyticsQuery { limit: 1000, event_type: "CLICK".into(), metric: Metric::Count, @@ -394,7 +411,7 @@ mod test { publisher: None, hostname: None, country: None, - os_name: None + os_name: None, }; let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() @@ -422,7 +439,7 @@ mod test { // Test with end date let end_date = DateHour::::now() - 1; - let query = AnalyticsQuery { + let query = AnalyticsQuery { limit: 1000, event_type: "CLICK".into(), metric: Metric::Count, @@ -438,7 +455,7 @@ mod test { publisher: None, hostname: None, country: None, - os_name: None + os_name: None, }; let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() @@ -469,7 +486,7 @@ mod test { let start_date = DateHour::::now() - 72; // subtract 1 hour let end_date = DateHour::::now() - 1; - let query = AnalyticsQuery { + let query = AnalyticsQuery { limit: 1000, event_type: "CLICK".into(), metric: Metric::Count, @@ -485,7 +502,7 @@ mod test { publisher: None, hostname: None, country: None, - os_name: None + os_name: None, }; let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() @@ -511,14 +528,14 @@ mod test { assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); // Test with segment_by - let query = AnalyticsQuery { + let query = AnalyticsQuery { limit: 1000, event_type: "CLICK".into(), metric: Metric::Count, timeframe: Timeframe::Day, segment_by: Some("country".into()), start: None, - end: Some(AnalyticsQueryTime::Date(end_date)), + end: None, campaign_id: None, ad_unit: None, ad_slot: None, @@ -526,8 +543,8 @@ mod test { advertiser: None, publisher: None, hostname: None, - country: None, - os_name: None + country: Some(AnalyticsQueryKey::String("Bulgaria".into())), + os_name: None, }; let query = 
serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() @@ -551,7 +568,7 @@ mod test { serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); // Test with not allowed segment by let req = Request::builder() @@ -741,8 +758,9 @@ mod test { async fn insert_mock_analytics_for_auth_routes(pool: &DbPool) { // Analytics with publisher and advertiser - let now_date = DateHour::try_from(Utc::today().and_hms(1, 0, 0)).expect("should parse"); - let analytics_now = UpdateAnalytics { + let now_date = DateHour::try_from(Utc::today().and_hms(Utc::now().hour(), 0, 0)) + .expect("should parse"); + let analytics = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[0]), @@ -757,11 +775,11 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics) .await .expect("Should update analytics"); // Analytics with a different unit/slot - let analytics_now = UpdateAnalytics { + let analytics_different_slot_unit = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[2]), @@ -776,11 +794,11 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics_different_slot_unit) .await .expect("Should update analytics"); // Analytics with a different event type - let analytics_now = UpdateAnalytics { + let analytics_different_event = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[0]), @@ -795,11 +813,11 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics_different_event) .await .expect("Should update analytics"); // Analytics with no None fields - let analytics_now = UpdateAnalytics { + let analytics_all_optional_fields = UpdateAnalytics { time: now_date - 2, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[0]), @@ -814,11 +832,11 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics_all_optional_fields) .await .expect("Should update analytics"); // Analytics with different publisher - let analytics_now = UpdateAnalytics { + let analytics_different_publisher = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[0]), @@ -833,11 +851,11 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics_different_publisher) .await .expect("Should update analytics"); // Analytics with different advertiser - let analytics_now = UpdateAnalytics { + let analytics_different_advertiser = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[0]), @@ -852,11 +870,11 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics_different_advertiser) .await .expect("Should update analytics"); // Analytics with both a different publisher and advertiser - let 
analytics_now = UpdateAnalytics { + let analytics_different_publisher_advertiser = UpdateAnalytics { time: now_date, campaign_id: DUMMY_CAMPAIGN.id, ad_unit: Some(DUMMY_IPFS[0]), @@ -871,7 +889,7 @@ mod test { amount_to_add: UnifiedNum::from_u64(1_000_000), count_to_add: 1, }; - update_analytics(pool, analytics_now) + update_analytics(pool, analytics_different_publisher_advertiser) .await .expect("Should update analytics"); } @@ -886,7 +904,7 @@ mod test { }; let advertiser_auth = Auth { era: 0, - uid: ValidatorId::from(ADDRESSES["advertiser"]), + uid: ValidatorId::from(ADDRESSES["creator"]), }; let admin_auth = Auth { era: 0, @@ -911,7 +929,7 @@ mod test { serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); // test for advertiser let req = Request::builder() .extension(advertiser_auth) @@ -930,7 +948,7 @@ mod test { serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); // test for admin let req = Request::builder() .extension(admin_auth.clone()) @@ -949,13 +967,33 @@ mod test { serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 7); + assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 6); // test for admin with all optional keys let start_date = DateHour::::now() - 72; let end_date = DateHour::::now() - 1; + let query = AnalyticsQuery { + limit: 1000, + event_type: "CLICK".into(), + metric: Metric::Count, + timeframe: Timeframe::Day, + segment_by: Some("country".into()), + start: Some(AnalyticsQueryTime::Date(start_date)), + end: Some(AnalyticsQueryTime::Date(end_date)), + campaign_id: Some(AnalyticsQueryKey::CampaignId(DUMMY_CAMPAIGN.id)), + ad_unit: Some(AnalyticsQueryKey::IPFS(DUMMY_IPFS[0])), + ad_slot: Some(AnalyticsQueryKey::IPFS(DUMMY_IPFS[1])), + ad_slot_type: Some(AnalyticsQueryKey::String("TEST_TYPE".into())), + advertiser: Some(AnalyticsQueryKey::Address(ADDRESSES["creator"])), + publisher: Some(AnalyticsQueryKey::Address(ADDRESSES["publisher"])), + hostname: Some(AnalyticsQueryKey::String("localhost".into())), + country: Some(AnalyticsQueryKey::String("Bulgaria".into())), + os_name: Some(AnalyticsQueryKey::OperatingSystem(OperatingSystem::map_os( + "Windows", + ))), + }; + let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() - .extension(admin_auth) - .uri(format!("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day&segmentBy=campaignId&start={}&end={}&campaignId={}&adUnit={}&adSlot={}&adSlotType=TEST_TYPE&advertiser={}&publisher={}&hostname=localhost&country=Bulgaria&osName=Windows", start_date, end_date, DUMMY_CAMPAIGN.id, DUMMY_IPFS[0], DUMMY_IPFS[1], ADDRESSES["creator"], ADDRESSES["publisher"])) + .uri(format!("http://127.0.0.1/analytics?{}", query)) .body(Body::empty()) .expect("Should build Request"); diff --git a/validator_worker/Cargo.toml 
b/validator_worker/Cargo.toml index bab44a27d..76c51739c 100644 --- a/validator_worker/Cargo.toml +++ b/validator_worker/Cargo.toml @@ -5,6 +5,7 @@ authors = [ "Ambire ", "Lachezar Lechev ", "Samparsky ", + "Simeon Nakov " ] edition = "2021" From 35942fb6132936ce52aea072c12b87f650597389 Mon Sep 17 00:00:00 2001 From: Simeon Nakov Date: Tue, 14 Dec 2021 19:04:49 +0200 Subject: [PATCH 15/24] output is represented correctly + other minor changes --- primitives/src/analytics.rs | 6 +- primitives/src/sentry.rs | 4 +- sentry/src/db/analytics.rs | 16 ++- sentry/src/routes/analytics.rs | 189 +++++++++++++++++++++------------ 4 files changed, 134 insertions(+), 81 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index e2fd7117c..2c68ddde4 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -70,7 +70,7 @@ pub mod postgres { Self::Date(datehour) => datehour.to_sql(ty, w), Self::Timestamp(ts) => { // Create a NaiveDateTime from the timestamp - let naive = NaiveDateTime::from_timestamp(0, *ts); + let naive = NaiveDateTime::from_timestamp(*ts, 0); // Create a normal DateTime from the NaiveDateTime let datetime: DateTime = DateTime::from_utc(naive, Utc); datetime.date().and_hms(datetime.hour(), 0, 0).to_sql(ty, w) @@ -118,7 +118,7 @@ pub struct AnalyticsQuery { #[serde(untagged, rename_all = "camelCase")] pub enum AnalyticsQueryTime { Date(DateHour), - Timestamp(u32), + Timestamp(i64), } #[derive(Debug, Serialize, Deserialize)] @@ -325,7 +325,7 @@ where } _ => { let timestamp = date_as_str - .parse::() + .parse::() .map_err(serde::de::Error::custom)?; Ok(Some(AnalyticsQueryTime::Timestamp(timestamp))) } diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 9d23e1666..9e4b5baea 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -203,14 +203,12 @@ pub struct Analytics { pub payout_count: u32, } -// TODO: Verify this is the needed output #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] #[serde(rename_all = "camelCase")] pub struct FetchedAnalytics { // time is represented as a timestamp pub time: i64, - pub payout_amount: Option, - pub payout_count: Option, + pub value: UnifiedNum, // We can't know the exact segment type but it can always be represented as a string pub segment: Option, } diff --git a/sentry/src/db/analytics.rs b/sentry/src/db/analytics.rs index 32cc86601..a0ea945e5 100644 --- a/sentry/src/db/analytics.rs +++ b/sentry/src/db/analytics.rs @@ -2,6 +2,7 @@ use chrono::{DateTime, Utc}; use primitives::{ analytics::{AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs, Metric}, sentry::{Analytics, FetchedAnalytics, UpdateAnalytics}, + UnifiedNum, }; use tokio_postgres::types::ToSql; @@ -22,8 +23,8 @@ pub async fn get_analytics( let mut select_clause = vec!["time".to_string()]; match &query.metric { - Metric::Paid => select_clause.push("payout_amount".to_string()), - Metric::Count => select_clause.push("payout_count".to_string()), + Metric::Paid => select_clause.push("payout_amount as value".to_string()), + Metric::Count => select_clause.push("payout_count as value".to_string()), } let mut group_clause = vec!["time".to_string()]; @@ -54,11 +55,16 @@ pub async fn get_analytics( None => None, }; let time = row.get::<_, DateTime>("time"); - + let value = match &query.metric { + Metric::Paid => row.get("value"), + Metric::Count => { + let count: i32 = row.get("value"); + UnifiedNum::from_u64(count as u64) + } + }; FetchedAnalytics { time: time.timestamp(), - payout_amount: 
row.try_get("payout_amount").ok(), - payout_count: row.try_get("payout_count").ok(), + value, segment: segment_value, } }) diff --git a/sentry/src/routes/analytics.rs b/sentry/src/routes/analytics.rs index 40ac815c4..7feb5e27e 100644 --- a/sentry/src/routes/analytics.rs +++ b/sentry/src/routes/analytics.rs @@ -3,9 +3,7 @@ use hyper::{Body, Request, Response}; use once_cell::sync::Lazy; use primitives::{ adapter::Adapter, - analytics::{ - AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs, Metric, ANALYTICS_QUERY_LIMIT, - }, + analytics::{AnalyticsQuery, AnalyticsQueryTime, AuthenticateAs, ANALYTICS_QUERY_LIMIT}, sentry::{DateHour, FetchedAnalytics}, UnifiedNum, }; @@ -79,6 +77,7 @@ pub async fn analytics( _ => None, }; + // TODO: Clean up this logic let allowed_keys: Vec<&str> = allowed_keys .unwrap_or_else(|| ALLOWED_KEYS.to_vec()) .iter() @@ -105,16 +104,15 @@ pub async fn analytics( ) .await?; - let output = - split_entries_by_timeframe(analytics, period_in_hours, &query.metric, &query.segment_by); + let output = split_entries_by_timeframe(analytics, period_in_hours, &query.segment_by); Ok(success_response(serde_json::to_string(&output)?)) } +// TODO: This logic can be simplified or done in the SQL query fn split_entries_by_timeframe( mut analytics: Vec, period_in_hours: i64, - metric: &Metric, segment: &Option, ) -> Vec { let mut res: Vec = vec![]; @@ -124,50 +122,26 @@ fn split_entries_by_timeframe( while analytics.len() > period_in_hours { let drain_index = analytics.len() - period_in_hours; let analytics_fraction: Vec = analytics.drain(drain_index..).collect(); - let merged_analytics = merge_analytics(analytics_fraction, metric, segment); + let merged_analytics = merge_analytics(analytics_fraction, segment); res.push(merged_analytics); } if !analytics.is_empty() { - let merged_analytics = merge_analytics(analytics, metric, segment); + let merged_analytics = merge_analytics(analytics, segment); res.push(merged_analytics); } res } -fn merge_analytics( - analytics: Vec, - metric: &Metric, - segment: &Option, -) -> FetchedAnalytics { - match metric { - Metric::Count => { - let mut count = 0; - analytics - .iter() - .for_each(|a| count += a.payout_count.unwrap()); - FetchedAnalytics { - time: analytics.get(0).unwrap().time, - payout_count: Some(count), - payout_amount: None, - segment: segment.clone(), - } - } - Metric::Paid => { - let mut amount = UnifiedNum::from_u64(0); - analytics.iter().for_each(|a| { - let amount_to_add = a.payout_amount.unwrap(); - amount = amount - .checked_add(&amount_to_add) - .expect("TODO: Use Result to handle possible overflows"); - }); - FetchedAnalytics { - time: analytics.get(0).unwrap().time, - payout_count: None, - payout_amount: Some(amount), - segment: segment.clone(), - } - } +fn merge_analytics(analytics: Vec, segment: &Option) -> FetchedAnalytics { + let mut amount = UnifiedNum::from_u64(0); + analytics + .iter() + .for_each(|a| amount = amount.checked_add(&a.value).expect("TODO: Use result here")); + FetchedAnalytics { + time: analytics.get(0).unwrap().time, + value: amount, + segment: segment.clone(), } } @@ -200,7 +174,7 @@ mod test { }; use chrono::{Timelike, Utc}; use primitives::{ - analytics::{AnalyticsQueryKey, OperatingSystem, Timeframe}, + analytics::{AnalyticsQueryKey, Metric, OperatingSystem, Timeframe}, sentry::UpdateAnalytics, util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_IPFS}, }; @@ -389,8 +363,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); 
assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(4) + ); // Test with start date let start_date = DateHour::::now() - 1; @@ -434,8 +410,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(2) + ); // Test with end date let end_date = DateHour::::now() - 1; @@ -478,8 +456,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(2) + ); // Test with start_date and end_date // subtract 72 hours, there is an event exactly 72 hours ago so this also tests GTE @@ -524,8 +504,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(3) + ); // Test with segment_by let query = AnalyticsQuery { @@ -567,8 +549,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 3); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(3) + ); // Test with not allowed segment by let req = Request::builder() @@ -651,9 +635,8 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_amount.is_some()); assert_eq!( - fetched_analytics.get(0).unwrap().payout_amount.unwrap(), + fetched_analytics.get(0).unwrap().value, UnifiedNum::from_u64(4_000_000) ); @@ -678,8 +661,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 5); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(5) + ); // Test with a limit let req = Request::builder() @@ -702,8 +687,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 2); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(2) + ); // Test with a month timeframe let req = Request::builder() 
.uri( @@ -727,8 +714,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 6); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(6) + ); // Test with a year timeframe let req = Request::builder() .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=year") @@ -750,10 +739,62 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 7); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(7) + ); // Test with start and end as timestamps + let start_date = DateHour::::now() - 72; + // subtract 1 hour + let end_date = DateHour::::now() - 1; + let query = AnalyticsQuery { + limit: 1000, + event_type: "CLICK".into(), + metric: Metric::Count, + timeframe: Timeframe::Day, + segment_by: None, + start: Some(AnalyticsQueryTime::Timestamp( + start_date.to_datetime().timestamp(), + )), + end: Some(AnalyticsQueryTime::Timestamp( + end_date.to_datetime().timestamp(), + )), + campaign_id: None, + ad_unit: None, + ad_slot: None, + ad_slot_type: None, + advertiser: None, + publisher: None, + hostname: None, + country: None, + os_name: None, + }; + let query = serde_urlencoded::to_string(query).expect("should parse query"); + let req = Request::builder() + .uri(format!("http://127.0.0.1/analytics?{}", query)) + .body(Body::empty()) + .expect("Should build Request"); + let analytics_response = analytics( + req, + &app, + Some(vec!["country".into(), "ad_slot_type".into()]), + None, + ) + .await + .expect("Should get analytics data"); + let json = hyper::body::to_bytes(analytics_response.into_body()) + .await + .expect("Should get json"); + + let fetched_analytics: Vec = + serde_json::from_slice(&json).expect("Should get analytics response"); + assert_eq!(fetched_analytics.len(), 1); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(3) + ); + // Test with timeframe=day and start_date= 2 or more days ago to check if the results vec is split properly } async fn insert_mock_analytics_for_auth_routes(pool: &DbPool) { @@ -928,8 +969,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(4) + ); // test for advertiser let req = Request::builder() .extension(advertiser_auth) @@ -947,8 +990,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 4); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(4) + ); // test for admin let req = Request::builder() .extension(admin_auth.clone()) @@ -966,8 +1011,10 @@ mod test { let fetched_analytics: Vec = 
serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 6); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(6) + ); // test for admin with all optional keys let start_date = DateHour::::now() - 72; let end_date = DateHour::::now() - 1; @@ -1007,8 +1054,10 @@ mod test { let fetched_analytics: Vec = serde_json::from_slice(&json).expect("Should get analytics response"); assert_eq!(fetched_analytics.len(), 1); - assert!(fetched_analytics.get(0).unwrap().payout_count.is_some()); - assert_eq!(fetched_analytics.get(0).unwrap().payout_count.unwrap(), 1); + assert_eq!( + fetched_analytics.get(0).unwrap().value, + UnifiedNum::from_u64(1) + ); // test with no authUid let req = Request::builder() .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") From 38ccc0379e86027fb81bfeac463fc10757566fa4 Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Thu, 16 Dec 2021 14:50:56 +0200 Subject: [PATCH 16/24] Cargo - patch postgres-types --- Cargo.lock | 1153 +++++++++++++++++++++++----------------------------- Cargo.toml | 3 + 2 files changed, 510 insertions(+), 646 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5fa06c675..0fd3b8c8f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -22,7 +22,7 @@ dependencies = [ "serde", "serde-hex", "serde_json", - "sha2 0.9.3", + "sha2 0.9.8", "thiserror", "tiny-keccak 1.5.0", "tokio", @@ -32,9 +32,9 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.14.1" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7" +checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" dependencies = [ "gimli", ] @@ -54,7 +54,7 @@ dependencies = [ "num-integer", "once_cell", "primitives", - "rand 0.8.3", + "rand 0.8.4", "reqwest", "serde", "serde_json", @@ -63,69 +63,15 @@ dependencies = [ "url", ] -[[package]] -name = "aes" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54eb1d8fe354e5fc611daf4f2ea97dd45a765f4f1e4512306ec183ae2e8f20c9" -dependencies = [ - "aes-soft", - "aesni", - "block-cipher-trait", -] - -[[package]] -name = "aes-ctr" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2e5b0458ea3beae0d1d8c0f3946564f8e10f90646cf78c06b4351052058d1ee" -dependencies = [ - "aes-soft", - "aesni", - "ctr", - "stream-cipher", -] - -[[package]] -name = "aes-soft" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfd7e7ae3f9a1fb5c03b389fc6bb9a51400d0c13053f0dca698c832bfd893a0d" -dependencies = [ - "block-cipher-trait", - "byteorder 1.4.3", - "opaque-debug 0.2.3", -] - -[[package]] -name = "aesni" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f70a6b5f971e473091ab7cfb5ffac6cde81666c4556751d8d5620ead8abf100" -dependencies = [ - "block-cipher-trait", - "opaque-debug 0.2.3", - "stream-cipher", -] - [[package]] name = "aho-corasick" -version = "0.7.15" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" dependencies = [ 
"memchr", ] -[[package]] -name = "ansi_term" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" -dependencies = [ - "winapi 0.3.9", -] - [[package]] name = "ansi_term" version = "0.12.1" @@ -137,9 +83,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.38" +version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" +checksum = "8b26702f315f53b6071259e15dd9d64528213b44d61de1ec926eca7715d62203" [[package]] name = "array-init" @@ -173,9 +119,9 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "arrayvec" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4dc07131ffa69b8072d35f5007352af944213cde02545e2103680baed38fcd" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" [[package]] name = "async-channel" @@ -190,16 +136,16 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb877970c7b440ead138f6321a3b5395d6061183af779340b65e20c0fede9146" +checksum = "871f9bb5e0a22eeb7e8cf16641feb87c9dc67032ccf8ff49e772eb9941d3a965" dependencies = [ "async-task", "concurrent-queue", "fastrand", "futures-lite", "once_cell", - "vec-arena", + "slab", ] [[package]] @@ -220,29 +166,28 @@ dependencies = [ [[package]] name = "async-io" -version = "1.3.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9315f8f07556761c3e48fec2e6b276004acf426e6dc068b2c2251854d65ee0fd" +checksum = "a811e6a479f2439f0c04038796b5cfb3d2ad56c230e0f2d3f7b04d68cfee607b" dependencies = [ "concurrent-queue", - "fastrand", "futures-lite", "libc", "log", - "nb-connect", "once_cell", "parking", "polling", - "vec-arena", + "slab", + "socket2", "waker-fn", "winapi 0.3.9", ] [[package]] name = "async-lock" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1996609732bde4a9988bc42125f55f2af5f3c36370e27c778d5191a4a1b63bfb" +checksum = "e6a8ea61bf9947a1007c5cada31e647dbc77b103c679858150003ba697ea798b" dependencies = [ "event-listener", ] @@ -258,9 +203,9 @@ dependencies = [ [[package]] name = "async-std" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9f06685bad74e0570f5213741bea82158279a4103d988e57bfada11ad230341" +checksum = "f8056f1455169ab86dd47b47391e4ab0cbd25410a70e9fe675544f49bafaf952" dependencies = [ "async-channel", "async-global-executor", @@ -291,9 +236,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.48" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36ea56748e10732c49404c153638a15ec3d6211ec5ff35d9bb20e13b93576adf" +checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" dependencies = [ "proc-macro2", "quote", @@ -331,11 +276,12 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "backtrace" -version = "0.3.56" +version = "0.3.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc" 
+checksum = "321629d8ba6513061f26707241fa9bc89524ff1cd7a915a97ef0c62c666ce1b6" dependencies = [ "addr2line", + "cc", "cfg-if 1.0.0", "libc", "miniz_oxide", @@ -357,15 +303,15 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bitflags" -version = "1.2.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitvec" -version = "0.20.2" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f682656975d3a682daff957be4ddeb65d6ad656737cd821f2d00685ae466af1" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" dependencies = [ "funty", "radium", @@ -397,9 +343,9 @@ dependencies = [ [[package]] name = "blake3" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9ff35b701f3914bdb8fad3368d822c766ef2858b2583198e41639b936f09d3f" +checksum = "b64485778c4f16a6a5a9d335e80d449ac6c70cdd6a06d2af18a6f6f775a125b3" dependencies = [ "arrayref", "arrayvec 0.5.2", @@ -410,16 +356,6 @@ dependencies = [ "digest 0.9.0", ] -[[package]] -name = "block-buffer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab" -dependencies = [ - "arrayref", - "byte-tools 0.2.0", -] - [[package]] name = "block-buffer" version = "0.7.3" @@ -427,7 +363,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" dependencies = [ "block-padding 0.1.5", - "byte-tools 0.3.1", + "byte-tools", "byteorder 1.4.3", "generic-array 0.12.4", ] @@ -443,22 +379,12 @@ dependencies = [ ] [[package]] -name = "block-cipher-trait" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c924d49bd09e7c06003acda26cd9742e796e34282ec6c1189404dee0c1f4774" -dependencies = [ - "generic-array 0.12.4", -] - -[[package]] -name = "block-modes" -version = "0.3.3" +name = "block-buffer" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31aa8410095e39fdb732909fb5730a48d5bd7c2e3cd76bd1b07b3dbea130c529" +checksum = "f1d36a02058e76b040de25a4464ba1c80935655595b661505c8b39b664828b95" dependencies = [ - "block-cipher-trait", - "block-padding 0.1.5", + "generic-array 0.14.4", ] [[package]] @@ -467,7 +393,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" dependencies = [ - "byte-tools 0.3.1", + "byte-tools", ] [[package]] @@ -478,9 +404,9 @@ checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" [[package]] name = "blocking" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e170dbede1f740736619b776d7251cb1b9095c435c34d8ca9f57fcd2f335e9" +checksum = "046e47d4b2d391b1f6f8b407b1deb8dee56c1852ccd868becf2710f601b5f427" dependencies = [ "async-channel", "async-task", @@ -492,21 +418,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.6.1" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe" +checksum = "8f1e260c3a9040a7c19a12468758f4c16f31a81a1fe087482be9570ec864bb6c" [[package]] name = "byte-slice-cast" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65c1bf4a04a88c54f589125563643d773f3254b5c38571395e2b591c693bbc81" - -[[package]] -name = "byte-tools" -version = "0.2.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" +checksum = "1d30c751592b77c499e7bce34d99d67c2c11bdc0574e9a488ddade14150a4698" [[package]] name = "byte-tools" @@ -528,9 +448,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b700ce4376041dcd0a327fd0097c41095743c4c8af8887265942faf1100bd040" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" [[package]] name = "cache-padded" @@ -540,9 +460,9 @@ checksum = "631ae5198c9be5e753e5cc215e1bd73c2b466a3565173db433f52bb9d3e66dba" [[package]] name = "cc" -version = "1.0.67" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" +checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" [[package]] name = "cfg-if" @@ -583,11 +503,11 @@ dependencies = [ [[package]] name = "clap" -version = "2.33.3" +version = "2.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ - "ansi_term 0.11.0", + "ansi_term", "atty", "bitflags", "strsim 0.8.0", @@ -607,15 +527,16 @@ dependencies = [ [[package]] name = "combine" -version = "4.6.0" +version = "4.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2d47c1b11006b87e492b53b313bb699ce60e16613c4dddaa91f8f7c220ab2fa" +checksum = "b2b2f5d0ee456f3928812dfc8c6d9a1d592b98678f6d56db9b0cd2b7bc6c8db5" dependencies = [ "bytes", - "futures-util", + "futures-core", "memchr", "pin-project-lite", "tokio", + "tokio-util", ] [[package]] @@ -655,11 +576,17 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a89e2ae426ea83155dccf10c0fa6b1463ef6d5fcb44cee0b224a408fa640a62" +checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" dependencies = [ "core-foundation-sys", "libc", @@ -667,15 +594,18 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea221b5284a47e40033bf9b66f35f984ec0ea2931eb03505246cd27a963f981b" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] -name = "cpuid-bool" -version = "0.1.2" +name = "cpufeatures" +version = "0.2.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" +checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +dependencies = [ + "libc", +] [[package]] name = "create2" @@ -689,9 +619,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" +checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -699,9 +629,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" +checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", @@ -710,9 +640,9 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2584f639eb95fea8c798496315b297cf81b9b58b6d30ab066a75455333cf4b12" +checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -723,9 +653,9 @@ dependencies = [ [[package]] name = "crossbeam-queue" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f6cb3c7f5b8e51bc3ebb73a2327ad4abdbd119dc13223f14f961d2f38486756" +checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -733,11 +663,10 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7e9d99fa91428effe99c5c6d4634cdeba32b8cf784fc428a2a687f61a952c49" +checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" dependencies = [ - "autocfg 1.0.1", "cfg-if 1.0.0", "lazy_static", ] @@ -755,13 +684,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] -name = "crypto-mac" -version = "0.6.2" +name = "crypto-common" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7afa06d05a046c7a47c3a849907ec303504608c927f4e85f7bfff22b7180d971" +checksum = "683d6b536309245c849479fba3da410962a43ed8e51c26b729208ec0ac2798d0" dependencies = [ - "constant_time_eq", - "generic-array 0.9.1", + "generic-array 0.14.4", ] [[package]] @@ -776,9 +704,9 @@ dependencies = [ [[package]] name = "crypto-mac" -version = "0.10.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4857fd85a0c34b3c3297875b747c1e02e06b6a0ea32dd892d8192b9ce0813ea6" +checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" dependencies = [ "generic-array 0.14.4", "subtle", @@ -786,29 +714,19 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19" +checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa" dependencies = [ 
"quote", "syn", ] -[[package]] -name = "ctr" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022cd691704491df67d25d006fe8eca083098253c4d43516c2206479c58c6736" -dependencies = [ - "block-cipher-trait", - "stream-cipher", -] - [[package]] name = "darling" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "757c0ded2af11d8e739c4daea1ac623dd1624b06c844cf3f5a39f1bdbd99bb12" +checksum = "d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" dependencies = [ "darling_core", "darling_macro", @@ -816,9 +734,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c34d8efb62d0c2d7f60ece80f75e5c63c1588ba68032740494b0b9a996466e3" +checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" dependencies = [ "fnv", "ident_case", @@ -830,9 +748,9 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade7bff147130fe5e6d39f089c6bd49ec0250f35d70b2eebf72afdfc919f15cc" +checksum = "72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" dependencies = [ "darling_core", "quote", @@ -857,9 +775,9 @@ checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" [[package]] name = "data-encoding-macro" -version = "0.1.10" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a94feec3d2ba66c0b6621bca8bc6f68415b1e5c69af3586fdd0af9fd9f29b17" +checksum = "86927b7cd2fe88fa698b87404b287ab98d1a0063a34071d92e575b72d3029aca" dependencies = [ "data-encoding", "data-encoding-macro-internal", @@ -867,9 +785,9 @@ dependencies = [ [[package]] name = "data-encoding-macro-internal" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f83e699727abca3c56e187945f303389590305ab2f0185ea445aa66e8d5f2a" +checksum = "a5bbed42daaa95e780b60a50546aa345b8413a1e46f9a40a12907d3598f038db" dependencies = [ "data-encoding", "syn", @@ -891,9 +809,9 @@ dependencies = [ [[package]] name = "deadpool" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2d41e5afb1b1e89cc7204eb9ca99e05ba6fc810962957e7fe285d5024ed5890" +checksum = "ef82259c587bceda08349f28ff00f69ae4c897898f254140af6021eb218e8232" dependencies = [ "async-trait", "config 0.11.0", @@ -910,7 +828,7 @@ checksum = "1586368a579e26f158266d0a415bcb5df04a64b65913b2a31715d97987bd4d44" dependencies = [ "async-trait", "config 0.11.0", - "deadpool 0.8.0", + "deadpool 0.8.2", "futures", "log", "serde", @@ -920,12 +838,14 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.11" +version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ + "convert_case", "proc-macro2", "quote", + "rustc_version 0.4.0", "syn", ] @@ -937,29 +857,32 @@ checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" [[package]] name = "digest" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90" 
+checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" dependencies = [ - "generic-array 0.9.1", + "generic-array 0.12.4", ] [[package]] name = "digest" -version = "0.8.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.12.4", + "generic-array 0.14.4", ] [[package]] name = "digest" -version = "0.9.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +checksum = "b697d66081d42af4fba142d56918a3cb21dc8eb63372c6b85d14f44fb9c5979b" dependencies = [ + "block-buffer 0.10.0", + "crypto-common", "generic-array 0.14.4", + "subtle", ] [[package]] @@ -985,9 +908,9 @@ dependencies = [ [[package]] name = "dtoa" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d7ed2934d741c6b37e33e3832298e8850b53fd2d2bea03873375596c7cea4e" +checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" [[package]] name = "edit-distance" @@ -1003,9 +926,9 @@ checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "encoding_rs" -version = "0.8.28" +version = "0.8.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80df024fbc5ac80f87dfef0d9f5209a252f2a497f7f42944cff24d8253cac065" +checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df" dependencies = [ "cfg-if 1.0.0", ] @@ -1062,7 +985,7 @@ dependencies = [ "serde_json", "sha3 0.9.1", "thiserror", - "uint 0.9.0", + "uint 0.9.1", ] [[package]] @@ -1080,9 +1003,9 @@ dependencies = [ [[package]] name = "ethbloom" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "779864b9c7f7ead1f092972c3257496c6a84b46dba2ce131dd8a282cb2cc5972" +checksum = "bfb684ac8fa8f6c5759f788862bb22ec6fe3cb392f6bfd08e3c64b603661e3f8" dependencies = [ "crunchy 0.2.2", "fixed-hash 0.7.0", @@ -1111,12 +1034,12 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f64b5df66a228d85e4b17e5d6c6aa43b0310898ffe8a85988c4c032357aaabfd" dependencies = [ - "ethbloom 0.11.0", + "ethbloom 0.11.1", "fixed-hash 0.7.0", "impl-rlp", "impl-serde", "primitive-types", - "uint 0.9.0", + "uint 0.9.1", ] [[package]] @@ -1189,12 +1112,6 @@ dependencies = [ "rand 0.6.5", ] -[[package]] -name = "fake-simd" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" - [[package]] name = "fallible-iterator" version = "0.2.0" @@ -1203,9 +1120,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca5faf057445ce5c9d4329e382b2ce7ca38550ef3b73a5348362d5f24e0c7fe3" +checksum = "b394ed3d285a429378d3b384b9eb1285267e7df4b166df24b7a6939a04dc392e" dependencies = [ "instant", ] @@ -1228,7 +1145,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" dependencies = [ "byteorder 1.4.3", - "rand 0.8.3", + "rand 0.8.4", "rustc-hex 2.1.0", 
"static_assertions", ] @@ -1278,9 +1195,9 @@ checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" [[package]] name = "futures" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f55667319111d593ba876406af7c409c0ebb44dc4be6132a783ccf163ea14c1" +checksum = "a12aa0eb539080d55c3f2d45a67c3b58b6b0773c1a3ca2dfec66d58c97fd66ca" dependencies = [ "futures-channel", "futures-core", @@ -1293,9 +1210,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c2dd2df839b57db9ab69c2c9d8f3e8c81984781937fe2807dc6dcf3b2ad2939" +checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888" dependencies = [ "futures-core", "futures-sink", @@ -1303,15 +1220,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15496a72fabf0e62bdc3df11a59a3787429221dd0710ba8ef163d6f7a9112c94" +checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d" [[package]] name = "futures-executor" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891a4b7b96d84d5940084b2a37632dd65deeae662c114ceaa2c879629c9c0ad1" +checksum = "45025be030969d763025784f7f355043dc6bc74093e4ecc5000ca4dc50d8745c" dependencies = [ "futures-core", "futures-task", @@ -1320,15 +1237,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71c2c65c57704c32f5241c1223167c2c3294fd34ac020c807ddbe6db287ba59" +checksum = "522de2a0fe3e380f1bc577ba0474108faf3f6b18321dbf60b3b9c39a75073377" [[package]] name = "futures-lite" -version = "1.11.3" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4481d0cd0de1d204a4fa55e7d45f07b1d958abcb06714b3446438e2eff695fb" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" dependencies = [ "fastrand", "futures-core", @@ -1341,10 +1258,11 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea405816a5139fb39af82c2beb921d52143f556038378d6db21183a5c37fbfb7" +checksum = "18e4a4b95cea4b4ccbcf1c5675ca7c4ee4e9e75eb79944d07defde18068f79bb" dependencies = [ + "autocfg 1.0.1", "proc-macro-hack", "proc-macro2", "quote", @@ -1353,15 +1271,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85754d98985841b7d4f5e8e6fbfa4a4ac847916893ec511a2917ccd8525b8bb3" +checksum = "36ea153c13024fe480590b3e3d4cad89a0cfacecc24577b68f86c6ced9c2bc11" [[package]] name = "futures-task" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa189ef211c15ee602667a6fcfe1c1fd9e07d42250d2156382820fba33c9df80" +checksum = "1d3d00f4eddb73e498a54394f228cd55853bdf059259e8e7bc6e69d408892e99" [[package]] name = "futures-timer" @@ -1371,10 +1289,11 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.13" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1812c7ab8aedf8d6f2701a43e1243acdbcc2b36ab26e2ad421eb99ac963d96d1" +checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481" dependencies = [ + "autocfg 1.0.1", "futures-channel", "futures-core", "futures-io", @@ -1395,15 +1314,6 @@ version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" -[[package]] -name = "generic-array" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d00328cedcac5e81c683e5620ca6a30756fc23027ebf9bff405c0e8da1fbb7e" -dependencies = [ - "typenum", -] - [[package]] name = "generic-array" version = "0.12.4" @@ -1436,9 +1346,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" dependencies = [ "cfg-if 1.0.0", "libc", @@ -1447,15 +1357,15 @@ dependencies = [ [[package]] name = "gimli" -version = "0.23.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" +checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" [[package]] name = "gloo-timers" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47204a46aaff920a1ea58b11d03dec6f704287d27561724a4631e450654a891f" +checksum = "6f16c88aa13d2656ef20d1c042086b8767bbe2bdb62526894275a1b062161b2e" dependencies = [ "futures-channel", "futures-core", @@ -1466,9 +1376,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.1" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d832b01df74254fe364568d6ddc294443f61cbec82816b60904303af87efae78" +checksum = "8f072413d126e57991455e0a922b31e4c8ba7c2ffbebf6b78b4f8521397d65cd" dependencies = [ "bytes", "fnv", @@ -1485,24 +1395,24 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.9.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" [[package]] name = "headers" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0b7591fb62902706ae8e7aaff416b1b0fa2c0fd0878b46dc13baa3712d8a855" +checksum = "a4c4eb0471fcb85846d8b0690695ef354f9afb11cb03cac2e1d7c9253351afb0" dependencies = [ "base64", "bitflags", "bytes", "headers-core", "http", + "httpdate", "mime", "sha-1", - "time", ] [[package]] @@ -1525,9 +1435,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] @@ -1540,50 +1450,50 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hmac" -version = "0.6.3" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733e1b3ac906631ca01ebb577e9bb0f5e37a454032b9036b5eaea4013ed6f99a" 
+checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" dependencies = [ - "crypto-mac 0.6.2", - "digest 0.7.6", + "crypto-mac 0.11.1", + "digest 0.9.0", ] [[package]] name = "hmac" -version = "0.10.1" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +checksum = "ddca131f3e7f2ce2df364b57949a9d47915cfbd35e46cfee355ccebbf794d6a2" dependencies = [ - "crypto-mac 0.10.0", - "digest 0.9.0", + "digest 0.10.1", ] [[package]] name = "http" -version = "0.2.3" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747" +checksum = "1323096b05d41827dadeaee54c9981958c0f94e670bc94ed80037d1a7b8b186b" dependencies = [ "bytes", "fnv", - "itoa", + "itoa 0.4.8", ] [[package]] name = "http-body" -version = "0.4.0" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2861bd27ee074e5ee891e8b539837a9430012e249d7f0ca2d795650f579c1994" +checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" dependencies = [ "bytes", "http", + "pin-project-lite", ] [[package]] name = "http-types" -version = "2.11.1" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad077d89137cd3debdce53c66714dc536525ef43fe075d41ddc0a8ac11f85957" +checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad" dependencies = [ "anyhow", "async-channel", @@ -1602,21 +1512,21 @@ dependencies = [ [[package]] name = "httparse" -version = "1.3.5" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691" +checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" [[package]] name = "httpdate" -version = "0.3.2" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" [[package]] name = "hyper" -version = "0.14.4" +version = "0.14.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8e946c2b1349055e0b72ae281b238baf1a3ea7307c7e9f9d64673bdd9c26ac7" +checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" dependencies = [ "bytes", "futures-channel", @@ -1627,9 +1537,9 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa", - "pin-project", - "socket2 0.3.19", + "itoa 0.4.8", + "pin-project-lite", + "socket2", "tokio", "tower-service", "tracing", @@ -1657,9 +1567,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89829a5d69c23d348314a7ac337fe39173b61149a9864deabd260983aed48c21" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" dependencies = [ "matches", "unicode-bidi", @@ -1686,9 +1596,9 @@ dependencies = [ [[package]] name = "impl-serde" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b47ca4d2b6931707a55fce5cf66aff80e2178c8b63bbb4ecb5695cbc870ddf6f" +checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" dependencies = [ "serde", ] @@ 
-1706,9 +1616,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.6.2" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3" +checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" dependencies = [ "autocfg 1.0.1", "hashbrown", @@ -1722,18 +1632,18 @@ checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" [[package]] name = "instant" -version = "0.1.9" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ "cfg-if 1.0.0", ] [[package]] name = "ipnet" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47be2f14c678be2fdcab04ab1171db51b2762ce6f0a8ee87c8dd4a04ed216135" +checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" [[package]] name = "itertools" @@ -1746,15 +1656,21 @@ dependencies = [ [[package]] name = "itoa" -version = "0.4.7" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" [[package]] name = "js-sys" -version = "0.3.48" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" +checksum = "7cc9ffccd38c451a86bf13657df244e9c3f37493cce8e5e21e940963777acc84" dependencies = [ "wasm-bindgen", ] @@ -1807,9 +1723,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "lexical-core" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21f866863575d0e1d654fbeeabdc927292fdf862873dc3c96c6f753357e13374" +checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" dependencies = [ "arrayvec 0.5.2", "bitflags", @@ -1820,9 +1736,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.88" +version = "0.2.112" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b07a082330a35e43f63177cc01689da34fbffa0105e1246cf0311472cac73a" +checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" [[package]] name = "lock_api" @@ -1836,9 +1752,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.2" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" dependencies = [ "scopeguard 1.1.0", ] @@ -1855,9 +1771,9 @@ dependencies = [ [[package]] name = "matches" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" +checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "maybe-uninit" @@ -1876,11 +1792,20 @@ dependencies = [ 
"opaque-debug 0.3.0", ] +[[package]] +name = "md-5" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6a38fc55c8bbc10058782919516f88826e70320db6d206aebc49611d24216ae" +dependencies = [ + "digest 0.10.1", +] + [[package]] name = "memchr" -version = "2.3.4" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" [[package]] name = "memmap" @@ -1894,9 +1819,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.6.1" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" dependencies = [ "autocfg 1.0.1", ] @@ -1960,9 +1885,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.7.9" +version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5dede4e2065b3842b8b0af444119f3aa331cc7cc2dd20388bfb0f5d5a38823a" +checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" dependencies = [ "libc", "log", @@ -1973,11 +1898,10 @@ dependencies = [ [[package]] name = "miow" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" dependencies = [ - "socket2 0.3.19", "winapi 0.3.9", ] @@ -2004,7 +1928,7 @@ dependencies = [ "digest 0.9.0", "generic-array 0.14.4", "multihash-derive", - "sha2 0.9.3", + "sha2 0.9.8", "sha3 0.9.1", "unsigned-varint", ] @@ -2025,9 +1949,9 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8d96b2e1c8da3957d58100b09f102c6d9cfdfced01b7ec5a8974044bb09dbd4" +checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" dependencies = [ "lazy_static", "libc", @@ -2041,16 +1965,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "nb-connect" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670361df1bc2399ee1ff50406a0d422587dd3bb0da596e1978fe8e05dabddf4f" -dependencies = [ - "libc", - "socket2 0.3.19", -] - [[package]] name = "nodrop" version = "0.1.14" @@ -2093,9 +2007,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.0" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e0d047c1062aa51e256408c560894e5251f08925980e53cf1aa5bd00eec6512" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" dependencies = [ "autocfg 1.0.1", "num-integer", @@ -2179,15 +2093,18 @@ dependencies = [ [[package]] name = "object" -version = "0.23.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4" +checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" +dependencies = [ + "memchr", +] [[package]] name = "once_cell" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" +checksum = 
"da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" [[package]] name = "opaque-debug" @@ -2203,29 +2120,29 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.32" +version = "0.10.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038d43985d1ddca7a9900630d8cd031b56e4794eecc2e9ea39dd17aa04399a70" +checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" dependencies = [ "bitflags", "cfg-if 1.0.0", "foreign-types", - "lazy_static", "libc", + "once_cell", "openssl-sys", ] [[package]] name = "openssl-probe" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" +checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" [[package]] name = "openssl-sys" -version = "0.9.60" +version = "0.9.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "921fc71883267538946025deffb622905ecad223c28efbfdef9bb59a0175f3e6" +checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" dependencies = [ "autocfg 1.0.1", "cc", @@ -2254,29 +2171,23 @@ dependencies = [ [[package]] name = "parity-crypto" -version = "0.3.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf1b9c063d87e1507cb3807493c8d21859ef23b5414b39f81c53f0ba267d64c1" +checksum = "17b9db194dfbcfe3b398d63d765437a5c7232d59906e203055f0e993f6458ff1" dependencies = [ - "aes", - "aes-ctr", - "block-modes", - "digest 0.8.1", "quick-error", "ring", - "ripemd160", - "scrypt", - "sha2 0.8.2", + "rust-crypto", "tiny-keccak 1.5.0", ] [[package]] name = "parity-scale-codec" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8975095a2a03bbbdc70a74ab11a4f76a6d0b84680d87c68d722531b0ac28e8a9" +checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" dependencies = [ - "arrayvec 0.7.1", + "arrayvec 0.7.2", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", @@ -2286,9 +2197,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40dbbfef7f0a1143c5b06e0d76a6278e25dac0bc1af4be51a0fbb73f07e7ad09" +checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2324,13 +2235,13 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", - "lock_api 0.4.2", - "parking_lot_core 0.8.3", + "lock_api 0.4.5", + "parking_lot_core 0.8.5", ] [[package]] @@ -2341,22 +2252,22 @@ checksum = "94c8c7923936b28d546dfd14d4472eaf34c99b14e1c973a32b3e6d4eb04298c9" dependencies = [ "libc", "rand 0.6.5", - "rustc_version", + "rustc_version 0.2.3", "smallvec 0.6.14", "winapi 0.3.9", ] [[package]] name = "parking_lot_core" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +checksum = 
"d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" dependencies = [ "cfg-if 1.0.0", "instant", "libc", "redox_syscall", - "smallvec 1.6.1", + "smallvec 1.7.0", "winapi 0.3.9", ] @@ -2386,17 +2297,6 @@ dependencies = [ "syn", ] -[[package]] -name = "pbkdf2" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09cddfbfc98de7f76931acf44460972edb4023eb14d0c6d4018800e552d8e0" -dependencies = [ - "byteorder 1.4.3", - "crypto-mac 0.6.2", - "generic-array 0.9.1", -] - [[package]] name = "percent-encoding" version = "2.1.0" @@ -2405,36 +2305,36 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "phf" -version = "0.8.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ "phf_shared", ] [[package]] name = "phf_shared" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ "siphasher", ] [[package]] name = "pin-project" -version = "1.0.5" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96fa8ebb90271c4477f144354485b8068bd8f6b78b428b01ba892ca26caf0b63" +checksum = "576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.5" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" +checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389" dependencies = [ "proc-macro2", "quote", @@ -2443,9 +2343,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc0e1f259c92177c30a4c9d177246edd0a3568b25756a977d0632cf8fa37e905" +checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" [[package]] name = "pin-utils" @@ -2455,20 +2355,20 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.19" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" +checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" [[package]] name = "polling" -version = "2.0.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4" +checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", "libc", "log", - "wepoll-sys", + "wepoll-ffi", "winapi 0.3.9", ] @@ -2486,9 +2386,9 @@ dependencies = [ [[package]] name = "postgres" -version = "0.19.0" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f853fba627ed1f21392d329eeb03caf90dce57a65dfbd24274f4c39452ed3bb" +checksum = "eb76d6535496f633fa799bb872ffb4790e9cbdedda9d35564ca0252f930c0dd5" dependencies = [ "bytes", 
"fallible-iterator", @@ -2500,9 +2400,8 @@ dependencies = [ [[package]] name = "postgres-derive" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c857dd221cb0e7d8414b894a0ce29eae44d453dda0baa132447878e75e701477" +version = "0.4.1" +source = "git+https://github.com/elpiel/rust-postgres?branch=boxed-dyn-ToSql#c516805275aaaf106e8e512f53a9b0234f707583" dependencies = [ "proc-macro2", "quote", @@ -2524,42 +2423,58 @@ dependencies = [ [[package]] name = "postgres-protocol" -version = "0.6.1" +version = "0.6.2" +source = "git+https://github.com/elpiel/rust-postgres?branch=boxed-dyn-ToSql#c516805275aaaf106e8e512f53a9b0234f707583" +dependencies = [ + "base64", + "byteorder 1.4.3", + "bytes", + "fallible-iterator", + "hmac 0.11.0", + "md-5 0.9.1", + "memchr", + "rand 0.8.4", + "sha2 0.9.8", + "stringprep", +] + +[[package]] +name = "postgres-protocol" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff3e0f70d32e20923cabf2df02913be7c1842d4c772db8065c00fcfdd1d1bff3" +checksum = "79ec03bce71f18b4a27c4c64c6ba2ddf74686d69b91d8714fb32ead3adaed713" dependencies = [ "base64", "byteorder 1.4.3", "bytes", "fallible-iterator", - "hmac 0.10.1", - "md-5", + "hmac 0.12.0", + "md-5 0.10.0", "memchr", - "rand 0.8.3", - "sha2 0.9.3", + "rand 0.8.4", + "sha2 0.10.0", "stringprep", ] [[package]] name = "postgres-types" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "430f4131e1b7657b0cd9a2b0c3408d77c9a43a042d300b8c77f981dffcc43a2f" +version = "0.2.2" +source = "git+https://github.com/elpiel/rust-postgres?branch=boxed-dyn-ToSql#c516805275aaaf106e8e512f53a9b0234f707583" dependencies = [ "bytes", "chrono", "fallible-iterator", "postgres-derive", - "postgres-protocol", + "postgres-protocol 0.6.2", "serde", "serde_json", ] [[package]] name = "ppv-lite86" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" +checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba" [[package]] name = "pretty_assertions" @@ -2567,7 +2482,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0cfe1b2403f172ba0f234e500906ee0a3e493fb81092dac23ebefe129301cc" dependencies = [ - "ansi_term 0.12.1", + "ansi_term", "ctor", "diff", "output_vt100", @@ -2575,15 +2490,15 @@ dependencies = [ [[package]] name = "primitive-types" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +checksum = "06345ee39fbccfb06ab45f3a1a5798d9dafa04cb8921a76d227040003a234b0e" dependencies = [ "fixed-hash 0.7.0", "impl-codec", "impl-rlp", "impl-serde", - "uint 0.9.0", + "uint 0.9.1", ] [[package]] @@ -2607,7 +2522,7 @@ dependencies = [ "once_cell", "parse-display", "pretty_assertions", - "rand 0.8.3", + "rand 0.8.4", "serde", "serde-hex", "serde_json", @@ -2629,9 +2544,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fdbd1df62156fbc5945f4762632564d7d038153091c3fcf1067f6aef7cff92" +checksum = "1ebace6889caf889b4d3f76becee12e90353f2b8c7d875534a71e5742f8f6f83" dependencies = [ "thiserror", "toml", @@ -2675,9 +2590,9 @@ checksum = 
"bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" [[package]] name = "proc-macro2" -version = "1.0.27" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038" +checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1" dependencies = [ "unicode-xid", ] @@ -2690,9 +2605,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" dependencies = [ "proc-macro2", ] @@ -2760,14 +2675,14 @@ dependencies = [ [[package]] name = "rand" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" dependencies = [ "libc", - "rand_chacha 0.3.0", - "rand_core 0.6.2", - "rand_hc 0.3.0", + "rand_chacha 0.3.1", + "rand_core 0.6.3", + "rand_hc 0.3.1", ] [[package]] @@ -2792,12 +2707,12 @@ dependencies = [ [[package]] name = "rand_chacha" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.2", + "rand_core 0.6.3", ] [[package]] @@ -2826,11 +2741,11 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" dependencies = [ - "getrandom 0.2.2", + "getrandom 0.2.3", ] [[package]] @@ -2853,11 +2768,11 @@ dependencies = [ [[package]] name = "rand_hc" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" dependencies = [ - "rand_core 0.6.2", + "rand_core 0.6.3", ] [[package]] @@ -2915,9 +2830,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" +checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" dependencies = [ "autocfg 1.0.1", "crossbeam-deque", @@ -2927,9 +2842,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a" +checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -2958,7 +2873,7 @@ dependencies = [ "combine", "dtoa", "futures-util", - "itoa", + "itoa 0.4.8", "percent-encoding", "pin-project-lite", "sha1", @@ -2969,9 +2884,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.5" 
+version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" dependencies = [ "bitflags", ] @@ -2982,15 +2897,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" dependencies = [ - "getrandom 0.2.2", + "getrandom 0.2.3", "redox_syscall", ] [[package]] name = "regex" -version = "1.4.6" +version = "1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a26af418b574bd56588335b3a3659a65725d4e636eb1016c2f9e3b38c7cc759" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" dependencies = [ "aho-corasick", "memchr", @@ -3014,9 +2929,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.2" +version = "0.11.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf12057f289428dbf5c591c74bf10392e4a8003f993405a902f20117019022d4" +checksum = "07bea77bc708afa10e59905c3d4af7c8fd43c9214251673095ff8b14345fcbc5" dependencies = [ "base64", "bytes", @@ -3061,22 +2976,11 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "ripemd160" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad5112e0dbbb87577bfbc56c42450235e3012ce336e29c5befd7807bd626da4a" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "opaque-debug 0.2.3", -] - [[package]] name = "rlp" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54369147e3e7796c9b885c7304db87ca3d09a0a98f72843d532868675bbfba8" +checksum = "999508abb0ae792aabed2460c45b89106d97fe4adac593bdaef433c2605847b5" dependencies = [ "bytes", "rustc-hex 2.1.0", @@ -3097,9 +3001,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.18" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232" +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" [[package]] name = "rustc-hex" @@ -3125,20 +3029,29 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" dependencies = [ - "semver", + "semver 0.9.0", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver 1.0.4", ] [[package]] name = "rustversion" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb5d2a036dc6d2d8fd16fde3498b04306e29bd193bf306a57427019b823d5acd" +checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" [[package]] name = "ryu" -version = "1.0.5" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" [[package]] name = "same-file" @@ -3171,42 +3084,29 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" 
-[[package]] -name = "scrypt" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8570c5e2fa69cb29d492fd4e9974b6b5facb5a888e1c6da630d4a3cd7ebfef4a" -dependencies = [ - "byte-tools 0.3.1", - "byteorder 1.4.3", - "hmac 0.6.3", - "pbkdf2", - "sha2 0.7.1", -] - [[package]] name = "secp256k1" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733b114f058f260c0af7591434eef4272ae1a8ec2751766d3cb89c6df8d5e450" +checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" dependencies = [ "secp256k1-sys", ] [[package]] name = "secp256k1-sys" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e4b6455ee49f5901c8985b88f98fb0a0e1d90a6661f5a03f4888bd987dad29" +checksum = "827cb7cce42533829c792fc51b82fbf18b125b45a702ef2c8be77fce65463a7b" dependencies = [ "cc", ] [[package]] name = "security-framework" -version = "2.1.2" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d493c5f39e02dfb062cd8f33301f90f9b13b650e8c1b1d0fd75c19dd64bff69d" +checksum = "525bc1abfda2e1998d152c45cf13e696f76d0a4972310b22fac1658b05df7c87" dependencies = [ "bitflags", "core-foundation", @@ -3217,9 +3117,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.1.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee48cdde5ed250b0d3252818f646e174ab414036edb884dde62d80a3ac6082d" +checksum = "a9dd14d83160b528b7bfd66439110573efcfbe281b17fc2ca9f39f550d619c7e" dependencies = [ "core-foundation-sys", "libc", @@ -3234,6 +3134,12 @@ dependencies = [ "semver-parser", ] +[[package]] +name = "semver" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" + [[package]] name = "semver-parser" version = "0.7.0" @@ -3250,7 +3156,7 @@ dependencies = [ "chrono", "clap", "dashmap", - "deadpool 0.8.0", + "deadpool 0.8.2", "deadpool-postgres", "envy", "futures", @@ -3273,9 +3179,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.124" +version = "1.0.131" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd761ff957cb2a45fbb9ab3da6512de9de55872866160b23c25f1a841e99d29f" +checksum = "b4ad69dfbd3e45369132cc64e6748c2d65cdfb001a2b1c232d128b4ad60561c1" dependencies = [ "serde_derive", ] @@ -3293,9 +3199,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.124" +version = "1.0.131" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1800f7693e94e186f5e25a28291ae1570da908aff7d97a095dec1e56ff99069b" +checksum = "b710a83c4e0dff6a3d511946b95274ad9ca9e5d3ae497b63fda866ac955358d2" dependencies = [ "proc-macro2", "quote", @@ -3304,11 +3210,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.64" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79" +checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" dependencies = [ - "itoa", + "itoa 1.0.1", "ryu", "serde", ] @@ -3324,11 +3230,10 @@ dependencies = [ [[package]] name = "serde_qs" -version = "0.7.2" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5af82de3c6549b001bec34961ff2d6a54339a87bab37ce901b693401f27de6cb" 
+checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" dependencies = [ - "data-encoding", "percent-encoding", "serde", "thiserror", @@ -3341,16 +3246,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" dependencies = [ "form_urlencoded", - "itoa", + "itoa 0.4.8", "ryu", "serde", ] [[package]] name = "serde_with" -version = "1.9.4" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad9fdbb69badc8916db738c25efd04f0a65297d26c2f8de4b62e57b8c12bc72" +checksum = "ad6056b4cb69b6e43e3a0f055def223380baecc99da683884f205bf347f7c4b3" dependencies = [ "rustversion", "serde", @@ -3359,9 +3264,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "1.4.2" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1569374bd54623ec8bd592cf22ba6e03c0f177ff55fbc8c29a49e296e7adecf" +checksum = "12e47be9471c72889ebafb5e14d5ff930d89ae7a67bbdb5f8abb564f845a927e" dependencies = [ "darling", "proc-macro2", @@ -3371,13 +3276,13 @@ dependencies = [ [[package]] name = "sha-1" -version = "0.9.4" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfebf75d25bd900fd1e7d11501efab59bc846dbc76196839663e6637bba9f25f" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpuid-bool", + "cpufeatures", "digest 0.9.0", "opaque-debug 0.3.0", ] @@ -3390,39 +3295,26 @@ checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" [[package]] name = "sha2" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eb6be24e4c23a84d7184280d2722f7f2731fcdd4a9d886efbfe4413e4847ea0" -dependencies = [ - "block-buffer 0.3.3", - "byte-tools 0.2.0", - "digest 0.7.6", - "fake-simd", -] - -[[package]] -name = "sha2" -version = "0.8.2" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa" dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug 0.3.0", ] [[package]] name = "sha2" -version = "0.9.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa827a14b29ab7f44778d14a88d3cb76e949c45083f7dbfa507d0cb699dc12de" +checksum = "900d964dd36bb15bcf2f2b35694c072feab74969a54f2bbeec7a2d725d2bdcb6" dependencies = [ - "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpuid-bool", - "digest 0.9.0", - "opaque-debug 0.3.0", + "cpufeatures", + "digest 0.10.1", ] [[package]] @@ -3432,7 +3324,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd26bc0e7a2e3a7c959bc494caf58b72ee0c71d67704e9520f736ca7e4853ecf" dependencies = [ "block-buffer 0.7.3", - "byte-tools 0.3.1", + "byte-tools", "digest 0.8.1", "keccak", "opaque-debug 0.2.3", @@ -3452,24 +3344,24 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1d0fef1604ba8f7a073c7e701f213e056707210e9020af4528e0101ce11a6" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" 
dependencies = [ "libc", ] [[package]] name = "siphasher" -version = "0.3.3" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8f3741c7372e75519bd9346068370c9cdaabcc1f9599cbcf2a2719352286b7" +checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b" [[package]] name = "slab" -version = "0.4.2" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" +checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" [[package]] name = "slog" @@ -3479,9 +3371,9 @@ checksum = "8347046d4ebd943127157b94d63abb990fcf729dc4e9978927fdf4ac3c998d06" [[package]] name = "slog-async" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c60813879f820c85dbc4eabf3269befe374591289019775898d56a81a804fbdc" +checksum = "766c59b252e62a34651412870ff55d8c4e6d04df19b43eecb2703e417b097ffe" dependencies = [ "crossbeam-channel", "slog", @@ -3513,26 +3405,15 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" - -[[package]] -name = "socket2" -version = "0.3.19" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "winapi 0.3.9", -] +checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" [[package]] name = "socket2" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dfc207c526015c632472a77be09cf1b6e46866581aecae5cc38fb4235dea2" +checksum = "5dc90fe6c7be1a323296982db1836d1ea9e47b6839496dde9a541bc496df3516" dependencies = [ "libc", "winapi 0.3.9", @@ -3549,7 +3430,7 @@ dependencies = [ "futures", "httparse", "log", - "rand 0.8.3", + "rand 0.8.4", "sha-1", ] @@ -3571,15 +3452,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "stream-cipher" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8131256a5896cabcf5eb04f4d6dacbe1aefda854b0d9896e09cb58829ec5638c" -dependencies = [ - "generic-array 0.12.4", -] - [[package]] name = "stringprep" version = "0.1.2" @@ -3637,15 +3509,15 @@ dependencies = [ [[package]] name = "subtle" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.73" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7" +checksum = "8daf5dd0bb60cbd4137b1b587d2fc0ae729bc07cf01cd70b36a1ed5ade3b9d59" dependencies = [ "proc-macro2", "quote", @@ -3654,9 +3526,9 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.12.4" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701" +checksum = 
"f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", @@ -3694,7 +3566,7 @@ checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" dependencies = [ "cfg-if 1.0.0", "libc", - "rand 0.8.3", + "rand 0.8.4", "redox_syscall", "remove_dir_all", "winapi 0.3.9", @@ -3743,18 +3615,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.24" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.24" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" dependencies = [ "proc-macro2", "quote", @@ -3801,9 +3673,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.1.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" dependencies = [ "tinyvec_macros", ] @@ -3816,18 +3688,17 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.12.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2c2416fdedca8443ae44b4527de1ea633af61d8f7169ffa6e72c5b53d24efcc" +checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" dependencies = [ - "autocfg 1.0.1", "bytes", "libc", "memchr", "mio", "num_cpus", "once_cell", - "parking_lot 0.11.1", + "parking_lot 0.11.2", "pin-project-lite", "signal-hook-registry", "tokio-macros", @@ -3836,9 +3707,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "1.1.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caf7b11a536f46a809a8a9f0bb4237020f70ecbf115b842360afb127ea2fda57" +checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" dependencies = [ "proc-macro2", "quote", @@ -3857,9 +3728,9 @@ dependencies = [ [[package]] name = "tokio-postgres" -version = "0.7.2" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d2b1383c7e4fb9a09e292c7c6afb7da54418d53b045f1c1fac7a911411a2b8b" +checksum = "4b6c8b33df661b548dcd8f9bf87debb8c56c05657ed291122e1188698c2ece95" dependencies = [ "async-trait", "byteorder 1.4.3", @@ -3867,22 +3738,22 @@ dependencies = [ "fallible-iterator", "futures", "log", - "parking_lot 0.11.1", + "parking_lot 0.11.2", "percent-encoding", "phf", "pin-project-lite", - "postgres-protocol", + "postgres-protocol 0.6.3", "postgres-types", - "socket2 0.4.0", + "socket2", "tokio", "tokio-util", ] [[package]] name = "tokio-stream" -version = "0.1.4" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c535f53c0cfa1acace62995a8994fc9cc1f12d202420da96ff306ee24d576469" +checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3" dependencies = [ "futures-core", "pin-project-lite", @@ -3891,9 +3762,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.4" +version = "0.6.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec31e5cc6b46e653cf57762f36f71d5e6386391d88a72fd6db4508f8f676fb29" +checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" dependencies = [ "bytes", "futures-core", @@ -3921,9 +3792,9 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" -version = "0.1.25" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" +checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" dependencies = [ "cfg-if 1.0.0", "pin-project-lite", @@ -3932,9 +3803,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.17" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" +checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" dependencies = [ "lazy_static", ] @@ -3947,9 +3818,9 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "typenum" -version = "1.12.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" +checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" [[package]] name = "uint" @@ -3965,9 +3836,9 @@ dependencies = [ [[package]] name = "uint" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e11fe9a9348741cf134085ad57c249508345fe16411b3d7fb4ff2da2f1d6382e" +checksum = "6470ab50f482bde894a037a57064480a246dbfdd5960bd65a44824693f08da5f" dependencies = [ "byteorder 1.4.3", "crunchy 0.2.2", @@ -3977,39 +3848,36 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.4" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" -dependencies = [ - "matches", -] +checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" [[package]] name = "unicode-normalization" -version = "0.1.17" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef" +checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" +checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "unicode-xid" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" [[package]] name = "unsigned-varint" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f8d425fafb8cd76bc3f22aace4af471d3156301d7508f2107e98fbeae10bc7f" +checksum = "d86a8dc7f45e4c1b0d30e43038c38f274e77af056aa5f74b93c2cf9eb3c1c836" [[package]] name = "untrusted" @@ -4019,9 +3887,9 @@ checksum = 
"55cd1f4b4e96b46aeb8d4855db4a7a9bd96eeeb5c6a1ab54593328761642ce2f" [[package]] name = "url" -version = "2.2.1" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ccd964113622c8e9322cfac19eb1004a07e636c545f325da085d5cdde6f1f8b" +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" dependencies = [ "form_urlencoded", "idna", @@ -4036,7 +3904,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.2", + "getrandom 0.2.3", ] [[package]] @@ -4065,24 +3933,19 @@ dependencies = [ [[package]] name = "value-bag" -version = "1.0.0-alpha.6" +version = "1.0.0-alpha.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b676010e055c99033117c2343b33a40a30b91fecd6c49055ac9cd2d6c305ab1" +checksum = "79923f7731dc61ebfba3633098bf3ac533bbd35ccd8c57e7088d9a5eebe0263f" dependencies = [ "ctor", + "version_check", ] [[package]] name = "vcpkg" -version = "0.2.11" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" - -[[package]] -name = "vec-arena" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eafc1b9b2dfc6f5529177b62cf806484db55b32dc7c9658a118e11bbeb33061d" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vec_map" @@ -4092,9 +3955,9 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" [[package]] name = "waker-fn" @@ -4104,9 +3967,9 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", "winapi 0.3.9", @@ -4137,21 +4000,19 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "wasm-bindgen" -version = "0.2.71" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ee1280240b7c461d6a0071313e08f34a60b0365f14260362e5a2b17d1d31aa7" +checksum = "632f73e236b219150ea279196e54e610f5dbafa5d61786303d4da54f84e47fce" dependencies = [ "cfg-if 1.0.0", - "serde", - "serde_json", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.71" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b7d8b6942b8bb3a9b0e73fc79b98095a27de6fa247615e59d096754a3bc2aa8" +checksum = "a317bf8f9fba2476b4b2c85ef4c4af8ff39c3c7f0cdfeed4f82c34a880aa837b" dependencies = [ "bumpalo", "lazy_static", @@ -4164,9 +4025,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.21" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e67a5806118af01f0d9045915676b22aaebecf4178ae7021bc171dab0b897ab" +checksum = 
"8e8d7523cb1f2a4c96c1317ca690031b714a51cc14e05f712446691f413f5d39" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -4176,9 +4037,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.71" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ac38da8ef716661f0f36c0d8320b89028efe10c7c0afde65baffb496ce0d3b" +checksum = "d56146e7c495528bf6587663bea13a8eb588d39b36b679d83972e1a2dbbdacf9" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4186,9 +4047,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.71" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc053ec74d454df287b9374ee8abb36ffd5acb95ba87da3ba5b7d3fe20eb401e" +checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" dependencies = [ "proc-macro2", "quote", @@ -4199,15 +4060,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.71" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" +checksum = "0237232789cf037d5480773fe568aac745bfe2afbc11a863e97901780a6b47cc" [[package]] name = "web-sys" -version = "0.3.48" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec600b26223b2948cedfde2a0aa6756dcf1fef616f43d7b3097aaf53a6c4d92b" +checksum = "38eb105f1c59d9eaa6b5cdc92b859d85b926e82cb2e0945cd0c9259faa6fe9fb" dependencies = [ "js-sys", "wasm-bindgen", @@ -4231,7 +4092,7 @@ dependencies = [ "hex", "jsonrpc-core", "log", - "parking_lot 0.11.1", + "parking_lot 0.11.2", "pin-project", "reqwest", "rlp", @@ -4260,10 +4121,10 @@ dependencies = [ ] [[package]] -name = "wepoll-sys" -version = "3.0.1" +name = "wepoll-ffi" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcb14dea929042224824779fbc82d9fab8d2e6d3cbc0ac404de8edf489e77ff" +checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" dependencies = [ "cc", ] @@ -4322,9 +4183,9 @@ dependencies = [ [[package]] name = "wiremock" -version = "0.5.4" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3aa40f263823d9e69901f5b08ea3244bc193648fb87016022ac265a81cfb81e3" +checksum = "4c3fe7c6af90383100cd1486ef0467c2ebead0303ed7aa3dc6e51173ee3ff8ba" dependencies = [ "async-trait", "deadpool 0.7.0", diff --git a/Cargo.toml b/Cargo.toml index 7ee561b02..0e566c655 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,3 +8,6 @@ members = [ "sentry", "test_harness", ] + +[patch.crates-io] +postgres-types = { git = "https://github.com/elpiel/rust-postgres", branch = "boxed-dyn-ToSql"} \ No newline at end of file From 69a2448fe6687a996b88931b130665d78a272e1a Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 31 Dec 2021 17:51:49 +0200 Subject: [PATCH 17/24] primitives - senty: - FetchedAnalytics - use DateTime - test impl DateHour - &Timeframe --- primitives/src/sentry.rs | 52 ++++++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/primitives/src/sentry.rs b/primitives/src/sentry.rs index 9e4b5baea..e6619f4ba 100644 --- a/primitives/src/sentry.rs +++ b/primitives/src/sentry.rs @@ -5,7 +5,10 @@ use crate::{ validator::{ApproveState, Heartbeat, MessageTypes, NewState, Type as MessageType}, Address, Balances, BigNum, CampaignId, Channel, ChannelId, UnifiedNum, ValidatorId, IPFS, }; -use chrono::{Date, 
DateTime, Duration, NaiveDate, TimeZone, Timelike, Utc}; +use chrono::{ + serde::ts_milliseconds, Date, DateTime, Duration, NaiveDate, TimeZone, Timelike, + Utc, +}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::{collections::HashMap, fmt, hash::Hash, ops::Sub}; use thiserror::Error; @@ -207,7 +210,8 @@ pub struct Analytics { #[serde(rename_all = "camelCase")] pub struct FetchedAnalytics { // time is represented as a timestamp - pub time: i64, + #[serde(with = "ts_milliseconds")] + pub time: DateTime, pub value: UnifiedNum, // We can't know the exact segment type but it can always be represented as a string pub segment: Option, @@ -716,27 +720,6 @@ pub mod campaign_create { } } - // /// This implementation helps with test setup - // /// **NOTE:** It erases the CampaignId, since the creation of the campaign gives it's CampaignId - // impl From for CreateCampaign { - // fn from(campaign: Campaign) -> Self { - // Self { - // id: Some(campaign.id), - // channel: campaign.channel, - // creator: campaign.creator, - // budget: campaign.budget, - // validators: campaign.validators, - // title: campaign.title, - // pricing_bounds: campaign.pricing_bounds, - // event_submission: campaign.event_submission, - // ad_units: campaign.ad_units, - // targeting_rules: campaign.targeting_rules, - // created: campaign.created, - // active: campaign.active, - // } - // } - // } - // All editable fields stored in one place, used for checking when a budget is changed #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct ModifyCampaign { @@ -970,4 +953,27 @@ mod test { serde_json::to_value(click).expect("should serialize") ); } + + #[test] + fn datehour_subtract_timeframe() { + // test with End of year + { + let datehour = DateHour::from_ymdh(2021, 12, 31, 22); + + let yesterday = datehour - &Timeframe::Day; + let last_week = datehour - &Timeframe::Week; + let beginning_of_month = datehour - &Timeframe::Month; + let last_year = datehour - &Timeframe::Year; + + pretty_assertions::assert_eq!(DateHour::from_ymdh(2021, 12, 30, 22), yesterday); + pretty_assertions::assert_eq!(DateHour::from_ymdh(2021, 12, 24, 22), last_week); + // Subtracting uses hours so result has different Hour! + pretty_assertions::assert_eq!(DateHour::from_ymdh(2021, 12, 1, 12), beginning_of_month); + pretty_assertions::assert_eq!(DateHour::from_ymdh(2020, 12, 31, 22), last_year); + } + + let middle_of_month = DateHour::from_ymdh(2021, 12, 14, 12) - &Timeframe::Month; + // Subtracting uses hours so result has different Hour! 
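+        // A month here is 365 * 24 / 12 = 730 hours (30 days and 10 hours),
+        // so 2021-12-14 12:00 minus a month lands on 2021-11-14 02:00, as asserted below.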
+ pretty_assertions::assert_eq!(DateHour::from_ymdh(2021, 11, 14, 2), middle_of_month); + } } From 96fec6449bd82f350bab8cec6e9589c12892124c Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 31 Dec 2021 17:52:47 +0200 Subject: [PATCH 18/24] Cargo patch postgres-types: Use new branch `from-T-for-boxed-dyn-ToSql` --- Cargo.lock | 6 +++--- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0fd3b8c8f..dbcd98bb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2401,7 +2401,7 @@ dependencies = [ [[package]] name = "postgres-derive" version = "0.4.1" -source = "git+https://github.com/elpiel/rust-postgres?branch=boxed-dyn-ToSql#c516805275aaaf106e8e512f53a9b0234f707583" +source = "git+https://github.com/elpiel/rust-postgres?branch=from-T-for-boxed-dyn-ToSql#2c9b9a6bc76d71cc8d34c02ac51bf09999728768" dependencies = [ "proc-macro2", "quote", @@ -2424,7 +2424,7 @@ dependencies = [ [[package]] name = "postgres-protocol" version = "0.6.2" -source = "git+https://github.com/elpiel/rust-postgres?branch=boxed-dyn-ToSql#c516805275aaaf106e8e512f53a9b0234f707583" +source = "git+https://github.com/elpiel/rust-postgres?branch=from-T-for-boxed-dyn-ToSql#2c9b9a6bc76d71cc8d34c02ac51bf09999728768" dependencies = [ "base64", "byteorder 1.4.3", @@ -2459,7 +2459,7 @@ dependencies = [ [[package]] name = "postgres-types" version = "0.2.2" -source = "git+https://github.com/elpiel/rust-postgres?branch=boxed-dyn-ToSql#c516805275aaaf106e8e512f53a9b0234f707583" +source = "git+https://github.com/elpiel/rust-postgres?branch=from-T-for-boxed-dyn-ToSql#2c9b9a6bc76d71cc8d34c02ac51bf09999728768" dependencies = [ "bytes", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 0e566c655..f4ad77714 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,4 +10,4 @@ members = [ ] [patch.crates-io] -postgres-types = { git = "https://github.com/elpiel/rust-postgres", branch = "boxed-dyn-ToSql"} \ No newline at end of file +postgres-types = { git = "https://github.com/elpiel/rust-postgres", branch = "from-T-for-boxed-dyn-ToSql"} \ No newline at end of file From 8e842ee167ce11ad231362eb9f540d9d4ff165ca Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Fri, 31 Dec 2021 17:54:15 +0200 Subject: [PATCH 19/24] primitives - analytics - enum AllowedKeys & struct Time --- primitives/src/analytics.rs | 185 ++++++++-------- primitives/src/analytics/query.rs | 178 +++++++++++++++ sentry/src/db/analytics.rs | 89 ++++---- sentry/src/lib.rs | 35 ++- sentry/src/routes/analytics.rs | 349 +++++++++++++++--------------- 5 files changed, 513 insertions(+), 323 deletions(-) create mode 100644 primitives/src/analytics/query.rs diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index 2c68ddde4..774aeeee1 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,18 +1,35 @@ use crate::{sentry::DateHour, Address, CampaignId, ValidatorId, IPFS}; -use chrono::{DateTime, NaiveDateTime, Utc}; +use chrono::{serde::ts_milliseconds_option, Utc}; use parse_display::Display; use serde::{Deserialize, Deserializer, Serialize}; +use self::query::AllowedKey; + pub const ANALYTICS_QUERY_LIMIT: u32 = 200; #[cfg(feature = "postgres")] pub mod postgres { - use super::{AnalyticsQueryKey, AnalyticsQueryTime, OperatingSystem}; + use super::{query::AllowedKey, AnalyticsQuery, OperatingSystem}; use bytes::BytesMut; - use chrono::{DateTime, NaiveDateTime, Timelike, Utc}; use std::error::Error; use tokio_postgres::types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type}; + impl 
AnalyticsQuery { + pub fn get_key(&self, key: AllowedKey) -> Option> { + match key { + AllowedKey::CampaignId => self.campaign_id.map(Into::into), + AllowedKey::AdUnit => self.ad_unit.map(Into::into), + AllowedKey::AdSlot => self.ad_slot.map(Into::into), + AllowedKey::AdSlotType => self.ad_slot_type.clone().map(Into::into), + AllowedKey::Advertiser => self.advertiser.map(Into::into), + AllowedKey::Publisher => self.publisher.map(Into::into), + AllowedKey::Hostname => self.hostname.clone().map(Into::into), + AllowedKey::Country => self.country.clone().map(Into::into), + AllowedKey::OsName => self.os_name.clone().map(Into::into), + } + } + } + impl<'a> FromSql<'a> for OperatingSystem { fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { let str_slice = <&str as FromSql>::from_sql(ty, raw)?; @@ -41,49 +58,36 @@ pub mod postgres { to_sql_checked!(); } - impl ToSql for AnalyticsQueryKey { + impl ToSql for AllowedKey { fn to_sql( &self, ty: &Type, w: &mut BytesMut, ) -> Result> { - match self { - Self::CampaignId(id) => id.to_sql(ty, w), - Self::IPFS(ipfs) => ipfs.to_sql(ty, w), - Self::String(value) => value.to_sql(ty, w), - Self::Address(addr) => addr.to_sql(ty, w), - Self::OperatingSystem(os_name) => os_name.to_sql(ty, w), - } + self.to_string().to_sql(ty, w) } accepts!(TEXT, VARCHAR); to_sql_checked!(); } - impl ToSql for AnalyticsQueryTime { - fn to_sql( - &self, - ty: &Type, - w: &mut BytesMut, - ) -> Result> { - match self { - Self::Date(datehour) => datehour.to_sql(ty, w), - Self::Timestamp(ts) => { - // Create a NaiveDateTime from the timestamp - let naive = NaiveDateTime::from_timestamp(*ts, 0); - // Create a normal DateTime from the NaiveDateTime - let datetime: DateTime = DateTime::from_utc(naive, Utc); - datetime.date().and_hms(datetime.hour(), 0, 0).to_sql(ty, w) - } - } + impl<'a> FromSql<'a> for AllowedKey { + fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { + let allowed_key_string = String::from_sql(ty, raw)?; + + let allowed_key = + serde_json::from_value(serde_json::Value::String(allowed_key_string))?; + + Ok(allowed_key) } - accepts!(TIMESTAMPTZ); - to_sql_checked!(); + accepts!(TEXT, VARCHAR); } } -#[derive(Debug, Serialize, Deserialize)] +pub mod query; + +#[derive(Debug, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct AnalyticsQuery { #[serde(default = "default_limit")] @@ -94,31 +98,31 @@ pub struct AnalyticsQuery { pub metric: Metric, #[serde(default = "default_timeframe")] pub timeframe: Timeframe, - pub segment_by: Option, - #[serde(default)] - #[serde(deserialize_with = "deserialize_query_time")] - pub start: Option, - #[serde(default)] - #[serde(deserialize_with = "deserialize_query_time")] - pub end: Option, + pub segment_by: Option, + /// The default value used will be [`DateHour::now`] - [`AnalyticsQuery::timeframe`] + /// For this query parameter you can use either: + /// - a string with RFC 3339 and ISO 8601 format (see [`chrono::DateTime::parse_from_rfc3339`]) + /// - a timestamp in milliseconds + /// **Note:** [`DateHour`] rules should be uphold, this means that passed values should always be rounded to hours + /// the should not contain **minutes**, **seconds** or **nanoseconds** + // TODO: When deserializing AnalyticsQuery, take timeframe & timezone into account and impl Default value + // #[serde(default, deserialize_with = "deserialize_query_time")] + pub start: Option>, + // #[serde(default, deserialize_with = "deserialize_query_time")] + pub end: Option>, + // #[serde(flatten)] + // pub time: Time, // #[serde(default = 
"default_timezone")] // pub timezone: String, - pub campaign_id: Option, - pub ad_unit: Option, - pub ad_slot: Option, - pub ad_slot_type: Option, - pub advertiser: Option, - pub publisher: Option, - pub hostname: Option, - pub country: Option, - pub os_name: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(untagged, rename_all = "camelCase")] -pub enum AnalyticsQueryTime { - Date(DateHour), - Timestamp(i64), + pub campaign_id: Option, + pub ad_unit: Option, + pub ad_slot: Option, + pub ad_slot_type: Option, + pub advertiser: Option
<Address>, + pub publisher: Option<Address>
, + pub hostname: Option, + pub country: Option, + pub os_name: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -131,23 +135,6 @@ pub enum AnalyticsQueryKey { OperatingSystem(OperatingSystem), } -impl AnalyticsQuery { - pub fn get_key(&self, key: &str) -> Option<&AnalyticsQueryKey> { - match key { - "campaignId" => self.campaign_id.as_ref(), - "adUnit" => self.ad_unit.as_ref(), - "adSlot" => self.ad_slot.as_ref(), - "adSlotType" => self.ad_slot_type.as_ref(), - "advertiser" => self.advertiser.as_ref(), - "publisher" => self.publisher.as_ref(), - "hostname" => self.hostname.as_ref(), - "country" => self.country.as_ref(), - "osName" => self.os_name.as_ref(), - _ => None, - } - } -} - #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Display, Hash, Eq)] #[serde(untagged, into = "String", from = "String")] pub enum OperatingSystem { @@ -157,7 +144,7 @@ pub enum OperatingSystem { Other, } -#[derive(Debug, Clone, Serialize, Deserialize, Display)] +#[derive(Debug, Clone, Serialize, Deserialize, Display, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub enum Timeframe { Year, @@ -181,17 +168,7 @@ pub enum AuthenticateAs { Publisher(ValidatorId), } -impl AuthenticateAs { - pub fn try_from(key: &str, uid: ValidatorId) -> Option { - match key { - "advertiser" => Some(Self::Advertiser(uid)), - "publisher" => Some(Self::Publisher(uid)), - // TODO: Should we throw an error here - _ => None, - } - } -} - +// TODO: Move the postgres module impl Metric { pub fn column_name(self) -> String { match self { @@ -311,25 +288,37 @@ fn default_timeframe() -> Timeframe { Timeframe::Day } -fn deserialize_query_time<'de, D>(deserializer: D) -> Result, D::Error> +fn deserialize_query_time<'de, D>(deserializer: D) -> Result>, D::Error> where D: Deserializer<'de>, { - let date_as_str = String::deserialize(deserializer)?; - let naive = NaiveDateTime::parse_from_str(&date_as_str, "%Y-%m-%dT%H:%M:%SZ"); - match naive { - Ok(naive) => { - let datetime: DateTime = DateTime::from_utc(naive, Utc); - let dh = DateHour::try_from(datetime).map_err(serde::de::Error::custom)?; - Ok(Some(AnalyticsQueryTime::Date(dh))) - } - _ => { - let timestamp = date_as_str - .parse::() - .map_err(serde::de::Error::custom)?; - Ok(Some(AnalyticsQueryTime::Timestamp(timestamp))) - } - } + // let date_as_str = match Option::<&str>::deserialize(deserializer)? { + // Some(value) => value, + // // return early with None + // None => return Ok(None), + // }; + + let datehour = match ts_milliseconds_option::deserialize(deserializer) { + Ok(Some(datetime)) => DateHour::try_from(datetime).map_err(serde::de::Error::custom)?, + // return early with None + Ok(None) => return Ok(None), + // if we have an error trying to parse the value as milliseconds + // try to deserialize from string + Err(_err) => todo!(), + // match Option::<&str>::deserialize(deserializer)? { + // Some(value) => { + // let datetime = DateTime::parse_from_rfc3339(value) + // .map(|fixed| DateTime::::from(fixed)) + // .map_err(serde::de::Error::custom)?; + + // DateHour::try_from(datetime).map_err(serde::de::Error::custom)? 
+ // } + // // return early with None + // None => return Ok(None), + // }, + }; + + Ok(Some(datehour)) } // fn default_timezone() -> String { diff --git a/primitives/src/analytics/query.rs b/primitives/src/analytics/query.rs new file mode 100644 index 000000000..032a2986e --- /dev/null +++ b/primitives/src/analytics/query.rs @@ -0,0 +1,178 @@ +use chrono::Utc; +use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +use std::{collections::HashSet, fmt}; + +use crate::sentry::DateHour; + +use super::Timeframe; + +/// When adding new [`AllowedKey`] make sure to update the [`ALLOWED_KEYS`] static value. +#[derive(Debug, Serialize, Deserialize, Hash, PartialEq, Eq, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum AllowedKey { + CampaignId, + AdUnit, + AdSlot, + AdSlotType, + Advertiser, + Publisher, + Hostname, + Country, + OsName, +} + +impl fmt::Display for AllowedKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let json_value = serde_json::to_value(self).expect("Should never fail serialization!"); + let string = json_value + .as_str() + .expect("Json value should always be String!"); + + f.write_str(&string) + } +} + +/// All [`AllowedKey`]s should be present in this static variable. +pub static ALLOWED_KEYS: Lazy> = Lazy::new(|| { + vec![ + AllowedKey::CampaignId, + AllowedKey::AdUnit, + AllowedKey::AdSlot, + AllowedKey::AdSlotType, + AllowedKey::Advertiser, + AllowedKey::Publisher, + AllowedKey::Hostname, + AllowedKey::Country, + AllowedKey::OsName, + ] + .into_iter() + .collect() +}); + +#[derive(Debug, Serialize, Clone, PartialEq, Eq)] +pub struct Time { + // pub struct Time { + // #[serde(default = "default_timeframe")] + pub timeframe: Timeframe, + /// The default value used will be [`DateHour::now`] - [`AnalyticsQuery::timeframe`] + /// For this query parameter you can use either: + /// - a string with RFC 3339 and ISO 8601 format (see [`chrono::DateTime::parse_from_rfc3339`]) + /// - a timestamp in milliseconds + /// **Note:** [`DateHour`] rules should be uphold, this means that passed values should always be rounded to hours + /// And it should not contain **minutes**, **seconds** or **nanoseconds** + // TODO: When deserializing AnalyticsQuery, take timeframe & timezone into account and impl Default value + pub start: DateHour, + // #[serde(default, deserialize_with = "deserialize_query_time")] + pub end: Option>, + // we can use `chrono_tz` to support more Timezones when needed. + // #[serde(default = "default_timezone_utc")] + // pub timezone: Tz,//: chrono::TimeZone, +} +mod de { + use crate::{analytics::Timeframe, sentry::DateHour}; + + use super::Time; + use serde::{ + de::{self, MapAccess, Visitor}, + Deserialize, Deserializer, + }; + use std::fmt; + + impl<'de> Deserialize<'de> for Time { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + #[serde(field_identifier, rename_all = "lowercase")] + enum Field { + Timeframe, + Start, + End, + } + + struct TimeVisitor; + + impl<'de> Visitor<'de> for TimeVisitor { + type Value = Time; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("struct Time") + } + + fn visit_map(self, mut map: V) -> Result + where + V: MapAccess<'de>, + { + let mut timeframe = None; + let mut start = None; + let mut end = None; + while let Some(key) = map.next_key()? 
{ + match key { + Field::Timeframe => { + if timeframe.is_some() { + return Err(de::Error::duplicate_field("timeframe")); + } + timeframe = Some(map.next_value()?); + } + Field::Start => { + if start.is_some() { + return Err(de::Error::duplicate_field("start")); + } + start = Some(map.next_value()?); + } + Field::End => { + if end.is_some() { + return Err(de::Error::duplicate_field("end")); + } + end = Some(map.next_value()?); + } + } + } + + let timeframe = timeframe.unwrap_or(Timeframe::Day); + let start = start.unwrap_or_else(|| DateHour::now() - &timeframe); + Ok(Time { + timeframe, + start, + end, + }) + } + } + + const FIELDS: &'static [&'static str] = &["timeframe", "start", "end"]; + deserializer.deserialize_struct("Time", FIELDS, TimeVisitor) + } + } +} + +#[cfg(test)] +mod test { + use serde_json::{from_value, json}; + + use crate::{analytics::Timeframe, sentry::DateHour}; + + use super::Time; + + #[test] + fn deserialize_time() { + // default values for empty JSON object + { + let empty = json!({}); + + let time = from_value::, - allowed_keys: Option>, - auth_as_key: Option, + request_allowed: Option>, + authenticate_as: Option, ) -> Result, ResponseError> { - let query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - let period_in_hours = query.timeframe.to_hours(); - let start_date = match query.start { - Some(ref start_date) => start_date.to_owned(), - None => AnalyticsQueryTime::Date(DateHour::now() - period_in_hours), - }; - - let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); - - let not_allowed_keys = match &allowed_keys { - Some(keys) => ALLOWED_KEYS.iter().filter(|k| !keys.contains(k)).collect(), - None => vec![], - }; + let mut query = serde_urlencoded::from_str::(req.uri().query().unwrap_or(""))?; - if let Some(segment_by) = &query.segment_by { - if not_allowed_keys.contains(&segment_by) { - return Err(ResponseError::BadRequest(format!( - "Disallowed segmentBy: {}", - segment_by - ))); - } - if query.get_key(segment_by).is_none() { - return Err(ResponseError::BadRequest( - "SegmentBy is provided but a key is not passed".to_string(), - )); - } - } - - for key in not_allowed_keys { - if query.get_key(key).is_some() { - return Err(ResponseError::BadRequest(format!( - "disallowed key in query: {}", - key - ))); - } - } - - let auth = req.extensions().get::(); + // TODO: Deserialize with default value & timezone directly in AnalyticsQuery + query.start = query + .start + .or_else(|| Some(DateHour::now() - &query.timeframe)); - let auth_as = match (auth_as_key, auth) { - (Some(auth_as_key), Some(auth)) => AuthenticateAs::try_from(&auth_as_key, auth.uid), - (Some(_), None) => { - return Err(ResponseError::BadRequest( - "auth_as_key is provided but there is no Auth object".to_string(), - )) - } - _ => None, - }; + let applied_limit = query.limit.min(ANALYTICS_QUERY_LIMIT); - // TODO: Clean up this logic - let allowed_keys: Vec<&str> = allowed_keys - .unwrap_or_else(|| ALLOWED_KEYS.to_vec()) - .iter() - .map(|k| match k.as_ref() { - "campaignId" => "campaign_id", - "adUnit" => "ad_unit", - "adSlot" => "ad_slot", - "adSlotType" => "ad_slot_type", - "advertiser" => "advertiser", - "publisher" => "publisher", - "hostname" => "hostname", - "osName" => "os_name", - _ => "country", - }) - .collect(); + let allowed_keys: HashSet = request_allowed.unwrap_or_else(|| ALLOWED_KEYS.clone()); let analytics = get_analytics( &app.pool, - &start_date, - &query, + query.clone(), allowed_keys, - auth_as, + authenticate_as, applied_limit, ) .await?; - let output = 
split_entries_by_timeframe(analytics, period_in_hours, &query.segment_by); + // let output = split_entries_by_timeframe(analytics, query.timeframe.to_hours(), query.segment_by.clone()); - Ok(success_response(serde_json::to_string(&output)?)) + // Ok(success_response(serde_json::to_string(&output)?)) + Ok(success_response(serde_json::to_string(&analytics)?)) } // TODO: This logic can be simplified or done in the SQL query -fn split_entries_by_timeframe( - mut analytics: Vec, - period_in_hours: i64, - segment: &Option, -) -> Vec { - let mut res: Vec = vec![]; - let period_in_hours = period_in_hours as usize; - // TODO: If there is an hour with no events this logic will fail - // FIX BEFORE MERGE! - while analytics.len() > period_in_hours { - let drain_index = analytics.len() - period_in_hours; - let analytics_fraction: Vec = analytics.drain(drain_index..).collect(); - let merged_analytics = merge_analytics(analytics_fraction, segment); - res.push(merged_analytics); - } +// fn split_entries_by_timeframe( +// mut analytics: Vec, +// period_in_hours: i64, +// segment: Option, +// ) -> Vec { +// let mut res: Vec = vec![]; +// let period_in_hours = period_in_hours as usize; +// // TODO: If there is an hour with no events this logic will fail +// // FIX BEFORE MERGE! +// while analytics.len() > period_in_hours { +// let drain_index = analytics.len() - period_in_hours; +// let analytics_fraction: Vec = analytics.drain(drain_index..).collect(); +// let merged_analytics = merge_analytics(analytics_fraction, segment); +// res.push(merged_analytics); +// } - if !analytics.is_empty() { - let merged_analytics = merge_analytics(analytics, segment); - res.push(merged_analytics); - } - res -} +// if !analytics.is_empty() { +// let merged_analytics = merge_analytics(analytics, segment); +// res.push(merged_analytics); +// } +// res +// } -fn merge_analytics(analytics: Vec, segment: &Option) -> FetchedAnalytics { - let mut amount = UnifiedNum::from_u64(0); - analytics - .iter() - .for_each(|a| amount = amount.checked_add(&a.value).expect("TODO: Use result here")); - FetchedAnalytics { - time: analytics.get(0).unwrap().time, - value: amount, - segment: segment.clone(), - } -} +// fn merge_analytics(analytics: Vec, segment: Option) -> FetchedAnalytics { +// let mut amount = UnifiedNum::from_u64(0); +// analytics +// .iter() +// .for_each(|a| amount = amount.checked_add(&a.value).expect("TODO: Use result here")); +// FetchedAnalytics { +// time: analytics.get(0).unwrap().time, +// value: amount, +// segment, +// } +// } // async fn cache( // redis: &MultiplexedConnection, @@ -175,7 +110,7 @@ mod test { use chrono::{Timelike, Utc}; use primitives::{ analytics::{AnalyticsQueryKey, Metric, OperatingSystem, Timeframe}, - sentry::UpdateAnalytics, + sentry::{FetchedAnalytics, UpdateAnalytics}, util::tests::prep_db::{ADDRESSES, DUMMY_CAMPAIGN, DUMMY_IPFS}, }; @@ -351,7 +286,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -377,7 +316,7 @@ mod test { metric: Metric::Count, timeframe: Timeframe::Day, segment_by: None, - start: Some(AnalyticsQueryTime::Date(start_date)), + start: Some(start_date), end: None, campaign_id: None, ad_unit: None, @@ -398,7 +337,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + 
.into_iter() + .collect(), + ), None, ) .await @@ -424,7 +367,7 @@ mod test { timeframe: Timeframe::Day, segment_by: None, start: None, - end: Some(AnalyticsQueryTime::Date(end_date)), + end: Some(end_date), campaign_id: None, ad_unit: None, ad_slot: None, @@ -444,7 +387,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -472,8 +419,8 @@ mod test { metric: Metric::Count, timeframe: Timeframe::Day, segment_by: None, - start: Some(AnalyticsQueryTime::Date(start_date)), - end: Some(AnalyticsQueryTime::Date(end_date)), + start: Some(start_date), + end: Some(end_date), campaign_id: None, ad_unit: None, ad_slot: None, @@ -492,7 +439,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -515,7 +466,7 @@ mod test { event_type: "CLICK".into(), metric: Metric::Count, timeframe: Timeframe::Day, - segment_by: Some("country".into()), + segment_by: Some(AllowedKey::Country), start: None, end: None, campaign_id: None, @@ -525,7 +476,7 @@ mod test { advertiser: None, publisher: None, hostname: None, - country: Some(AnalyticsQueryKey::String("Bulgaria".into())), + country: Some("Bulgaria".into()), os_name: None, }; let query = serde_urlencoded::to_string(query).expect("should parse query"); @@ -537,7 +488,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -563,7 +518,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await; @@ -583,7 +542,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await; @@ -603,7 +566,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await; @@ -623,7 +590,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -649,7 +620,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -675,7 +650,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -702,7 +681,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -727,7 +710,11 @@ mod test { let analytics_response = 
analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -754,12 +741,8 @@ mod test { metric: Metric::Count, timeframe: Timeframe::Day, segment_by: None, - start: Some(AnalyticsQueryTime::Timestamp( - start_date.to_datetime().timestamp(), - )), - end: Some(AnalyticsQueryTime::Timestamp( - end_date.to_datetime().timestamp(), - )), + start: Some(start_date), + end: Some(end_date), campaign_id: None, ad_unit: None, ad_slot: None, @@ -778,7 +761,11 @@ mod test { let analytics_response = analytics( req, &app, - Some(vec!["country".into(), "ad_slot_type".into()]), + Some( + vec![AllowedKey::Country, AllowedKey::AdSlotType] + .into_iter() + .collect(), + ), None, ) .await @@ -959,9 +946,14 @@ mod test { .body(Body::empty()) .expect("Should build Request"); - let analytics_response = analytics(req, &app, None, Some("publisher".to_string())) - .await - .expect("Should get analytics data"); + let analytics_response = analytics( + req, + &app, + None, + Some(AuthenticateAs::Publisher(publisher_auth.uid)), + ) + .await + .expect("Should get analytics data"); let json = hyper::body::to_bytes(analytics_response.into_body()) .await .expect("Should get json"); @@ -980,9 +972,14 @@ mod test { .body(Body::empty()) .expect("Should build Request"); - let analytics_response = analytics(req, &app, None, Some("advertiser".to_string())) - .await - .expect("Should get analytics data"); + let analytics_response = analytics( + req, + &app, + None, + Some(AuthenticateAs::Advertiser(publisher_auth.uid)), + ) + .await + .expect("Should get analytics data"); let json = hyper::body::to_bytes(analytics_response.into_body()) .await .expect("Should get json"); @@ -1023,20 +1020,18 @@ mod test { event_type: "CLICK".into(), metric: Metric::Count, timeframe: Timeframe::Day, - segment_by: Some("country".into()), - start: Some(AnalyticsQueryTime::Date(start_date)), - end: Some(AnalyticsQueryTime::Date(end_date)), - campaign_id: Some(AnalyticsQueryKey::CampaignId(DUMMY_CAMPAIGN.id)), - ad_unit: Some(AnalyticsQueryKey::IPFS(DUMMY_IPFS[0])), - ad_slot: Some(AnalyticsQueryKey::IPFS(DUMMY_IPFS[1])), - ad_slot_type: Some(AnalyticsQueryKey::String("TEST_TYPE".into())), - advertiser: Some(AnalyticsQueryKey::Address(ADDRESSES["creator"])), - publisher: Some(AnalyticsQueryKey::Address(ADDRESSES["publisher"])), - hostname: Some(AnalyticsQueryKey::String("localhost".into())), - country: Some(AnalyticsQueryKey::String("Bulgaria".into())), - os_name: Some(AnalyticsQueryKey::OperatingSystem(OperatingSystem::map_os( - "Windows", - ))), + segment_by: Some(AllowedKey::Country), + start: Some(start_date), + end: Some(end_date), + campaign_id: Some(DUMMY_CAMPAIGN.id), + ad_unit: Some(DUMMY_IPFS[0]), + ad_slot: Some(DUMMY_IPFS[1]), + ad_slot_type: Some("TEST_TYPE".into()), + advertiser: Some(ADDRESSES["creator"]), + publisher: Some(ADDRESSES["publisher"]), + hostname: Some("localhost".into()), + country: Some("Bulgaria".into()), + os_name: Some(OperatingSystem::map_os("Windows")), }; let query = serde_urlencoded::to_string(query).expect("should parse query"); let req = Request::builder() @@ -1058,17 +1053,19 @@ mod test { fetched_analytics.get(0).unwrap().value, UnifiedNum::from_u64(1) ); - // test with no authUid - let req = Request::builder() - .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") - .body(Body::empty()) - .expect("Should build Request"); - let analytics_response = 
analytics(req, &app, None, Some("publisher".to_string())).await; - let err_msg = "auth_as_key is provided but there is no Auth object".to_string(); - assert!(matches!( - analytics_response, - Err(ResponseError::BadRequest(err_msg)) - )); + // TODO: Move test to a analytics_router test + // test with no authUid + // let req = Request::builder() + // .uri("http://127.0.0.1/analytics?limit=100&eventType=CLICK&metric=count&timeframe=day") + // .body(Body::empty()) + // .expect("Should build Request"); + + // let analytics_response = analytics(req, &app, None, Some(AuthenticateAs::Publisher())).await; + // let err_msg = "auth_as_key is provided but there is no Auth object".to_string(); + // assert!(matches!( + // analytics_response, + // Err(ResponseError::BadRequest(err_msg)) + // )); } } From 1fea534f20fcbadadac3c21de9de34e183f90eef Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Tue, 4 Jan 2022 10:46:27 +0200 Subject: [PATCH 20/24] adapter - fix dummy warning --- adapter/src/dummy.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/adapter/src/dummy.rs b/adapter/src/dummy.rs index 8ca654ec3..9ee0beea5 100644 --- a/adapter/src/dummy.rs +++ b/adapter/src/dummy.rs @@ -13,7 +13,7 @@ use std::{collections::HashMap, fmt, sync::Arc}; #[derive(Debug, Clone)] pub struct DummyAdapter { identity: ValidatorId, - config: Config, + _config: Config, // Auth tokens that we have verified (tokenId => session) session_tokens: HashMap, // Auth tokens that we've generated to authenticate with someone (address => token) @@ -61,7 +61,7 @@ impl DummyAdapter { pub fn init(opts: DummyAdapterOptions, config: &Config) -> Self { Self { identity: opts.dummy_identity, - config: config.to_owned(), + _config: config.to_owned(), session_tokens: opts.dummy_auth, authorization_tokens: opts.dummy_auth_tokens, deposits: Default::default(), From 7d6a27650411c31d5d7900aa29acc5e44f50cc4c Mon Sep 17 00:00:00 2001 From: Lachezar Lechev Date: Mon, 10 Jan 2022 09:24:18 +0200 Subject: [PATCH 21/24] Analytics changes: - primitives - DateHour - PartialEq/PartialOrd - primitives - AllowedKey impls & use Time - sentry - migrations - rename column `os` to `os_name` - sentry - db - analytics - improve SQL query and prepare testing --- primitives/src/analytics.rs | 94 ++-------- primitives/src/analytics/query.rs | 161 +++++++++++++++--- primitives/src/sentry.rs | 76 ++++++++- .../20190806011140_initial-tables/up.sql | 6 +- sentry/src/db/analytics.rs | 148 +++++++++++++--- sentry/src/routes/analytics.rs | 96 ++++++----- 6 files changed, 405 insertions(+), 176 deletions(-) diff --git a/primitives/src/analytics.rs b/primitives/src/analytics.rs index 774aeeee1..82e4b22f2 100644 --- a/primitives/src/analytics.rs +++ b/primitives/src/analytics.rs @@ -1,9 +1,8 @@ -use crate::{sentry::DateHour, Address, CampaignId, ValidatorId, IPFS}; -use chrono::{serde::ts_milliseconds_option, Utc}; +use crate::{Address, CampaignId, ValidatorId, IPFS}; use parse_display::Display; -use serde::{Deserialize, Deserializer, Serialize}; +use serde::{Deserialize, Serialize}; -use self::query::AllowedKey; +use self::query::{AllowedKey, Time}; pub const ANALYTICS_QUERY_LIMIT: u32 = 200; @@ -17,15 +16,15 @@ pub mod postgres { impl AnalyticsQuery { pub fn get_key(&self, key: AllowedKey) -> Option> { match key { - AllowedKey::CampaignId => self.campaign_id.map(Into::into), - AllowedKey::AdUnit => self.ad_unit.map(Into::into), - AllowedKey::AdSlot => self.ad_slot.map(Into::into), - AllowedKey::AdSlotType => self.ad_slot_type.clone().map(Into::into), - 
AllowedKey::Advertiser => self.advertiser.map(Into::into), - AllowedKey::Publisher => self.publisher.map(Into::into), - AllowedKey::Hostname => self.hostname.clone().map(Into::into), - AllowedKey::Country => self.country.clone().map(Into::into), - AllowedKey::OsName => self.os_name.clone().map(Into::into), + AllowedKey::CampaignId => self.campaign_id.map(|campaign_id| Box::new(campaign_id) as _), + AllowedKey::AdUnit => self.ad_unit.map(|ad_unit| Box::new(ad_unit) as _), + AllowedKey::AdSlot => self.ad_slot.map(|ad_slot| Box::new(ad_slot) as _), + AllowedKey::AdSlotType => self.ad_slot_type.clone().map(|ad_slot_type| Box::new(ad_slot_type) as _), + AllowedKey::Advertiser => self.advertiser.map(|advertiser| Box::new(advertiser) as _), + AllowedKey::Publisher => self.publisher.map(|publisher| Box::new(publisher) as _), + AllowedKey::Hostname => self.hostname.clone().map(|hostname| Box::new(hostname) as _), + AllowedKey::Country => self.country.clone().map(|country| Box::new(country) as _), + AllowedKey::OsName => self.os_name.clone().map(|os_name| Box::new(os_name) as _), } } } @@ -73,12 +72,9 @@ pub mod postgres { impl<'a> FromSql<'a> for AllowedKey { fn from_sql(ty: &Type, raw: &'a [u8]) -> Result> { - let allowed_key_string = String::from_sql(ty, raw)?; + let allowed_key_str = <&'a str as FromSql>::from_sql(ty, raw)?; - let allowed_key = - serde_json::from_value(serde_json::Value::String(allowed_key_string))?; - - Ok(allowed_key) + Ok(allowed_key_str.parse()?) } accepts!(TEXT, VARCHAR); @@ -96,24 +92,9 @@ pub struct AnalyticsQuery { pub event_type: String, #[serde(default = "default_metric")] pub metric: Metric, - #[serde(default = "default_timeframe")] - pub timeframe: Timeframe, pub segment_by: Option, - /// The default value used will be [`DateHour::now`] - [`AnalyticsQuery::timeframe`] - /// For this query parameter you can use either: - /// - a string with RFC 3339 and ISO 8601 format (see [`chrono::DateTime::parse_from_rfc3339`]) - /// - a timestamp in milliseconds - /// **Note:** [`DateHour`] rules should be uphold, this means that passed values should always be rounded to hours - /// the should not contain **minutes**, **seconds** or **nanoseconds** - // TODO: When deserializing AnalyticsQuery, take timeframe & timezone into account and impl Default value - // #[serde(default, deserialize_with = "deserialize_query_time")] - pub start: Option>, - // #[serde(default, deserialize_with = "deserialize_query_time")] - pub end: Option>, - // #[serde(flatten)] - // pub time: Time, - // #[serde(default = "default_timezone")] - // pub timezone: String, + #[serde(flatten)] + pub time: Time, pub campaign_id: Option, pub ad_unit: Option, pub ad_slot: Option, @@ -168,8 +149,8 @@ pub enum AuthenticateAs { Publisher(ValidatorId), } -// TODO: Move the postgres module impl Metric { + #[cfg(feature = "postgres")] pub fn column_name(self) -> String { match self { Metric::Count => "payout_count".to_string(), @@ -284,47 +265,6 @@ fn default_metric() -> Metric { Metric::Count } -fn default_timeframe() -> Timeframe { - Timeframe::Day -} - -fn deserialize_query_time<'de, D>(deserializer: D) -> Result>, D::Error> -where - D: Deserializer<'de>, -{ - // let date_as_str = match Option::<&str>::deserialize(deserializer)? 
-    //     Some(value) => value,
-    //     // return early with None
-    //     None => return Ok(None),
-    // };
-
-    let datehour = match ts_milliseconds_option::deserialize(deserializer) {
-        Ok(Some(datetime)) => DateHour::try_from(datetime).map_err(serde::de::Error::custom)?,
-        // return early with None
-        Ok(None) => return Ok(None),
-        // if we have an error trying to parse the value as milliseconds
-        // try to deserialize from string
-        Err(_err) => todo!(),
-        // match Option::<&str>::deserialize(deserializer)? {
-        //     Some(value) => {
-        //         let datetime = DateTime::parse_from_rfc3339(value)
-        //             .map(|fixed| DateTime::::from(fixed))
-        //             .map_err(serde::de::Error::custom)?;
-
-        //         DateHour::try_from(datetime).map_err(serde::de::Error::custom)?
-        //     }
-        //     // return early with None
-        //     None => return Ok(None),
-        // },
-    };
-
-    Ok(Some(datehour))
-}
-
-// fn default_timezone() -> String {
-//     "UTC".into()
-// }
-
 #[cfg(test)]
 mod test {
     use super::*;
diff --git a/primitives/src/analytics/query.rs b/primitives/src/analytics/query.rs
index 032a2986e..b097551e0 100644
--- a/primitives/src/analytics/query.rs
+++ b/primitives/src/analytics/query.rs
@@ -1,15 +1,20 @@
 use chrono::Utc;
 use once_cell::sync::Lazy;
+use parse_display::{Display, FromStr};
 use serde::{Deserialize, Serialize};
-use std::{collections::HashSet, fmt};
+use std::collections::HashSet;
 
 use crate::sentry::DateHour;
 
 use super::Timeframe;
 
 /// When adding new [`AllowedKey`] make sure to update the [`ALLOWED_KEYS`] static value.
-#[derive(Debug, Serialize, Deserialize, Hash, PartialEq, Eq, Clone, Copy)]
+/// When (De)Serializing we use `camelCase`,
+/// however, when displaying and parsing the value, we use `snake_case`.
+/// The latter is particularly useful when using the value as a column in SQL.
+#[derive(Debug, Serialize, Deserialize, Hash, PartialEq, Eq, Clone, Copy, Display, FromStr)]
 #[serde(rename_all = "camelCase")]
+#[display(style = "snake_case")]
 pub enum AllowedKey {
     CampaignId,
     AdUnit,
@@ -22,17 +27,6 @@ pub enum AllowedKey {
     OsName,
 }
 
-impl fmt::Display for AllowedKey {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let json_value = serde_json::to_value(self).expect("Should never fail serialization!");
-        let string = json_value
-            .as_str()
-            .expect("Json value should always be String!");
-
-        f.write_str(&string)
-    }
-}
-
 /// All [`AllowedKey`]s should be present in this static variable.
 pub static ALLOWED_KEYS: Lazy> = Lazy::new(|| {
     vec![
@@ -50,20 +44,52 @@ pub static ALLOWED_KEYS: Lazy> = Lazy::new(|| {
     .collect()
 });
 
+// fn deserialize_query_time<'de, D>(deserializer: D) -> Result>, D::Error>
+// where
+//     D: Deserializer<'de>,
+// {
+//     // let date_as_str = match Option::<&str>::deserialize(deserializer)? {
+//     //     Some(value) => value,
+//     //     // return early with None
+//     //     None => return Ok(None),
+//     // };
+
+//     let datehour = match ts_milliseconds_option::deserialize(deserializer) {
+//         Ok(Some(datetime)) => DateHour::try_from(datetime).map_err(serde::de::Error::custom)?,
+//         // return early with None
+//         Ok(None) => return Ok(None),
+//         // if we have an error trying to parse the value as milliseconds
+//         // try to deserialize from string
+//         Err(_err) => todo!(),
+//         // match Option::<&str>::deserialize(deserializer)? {
+//         //     Some(value) => {
+//         //         let datetime = DateTime::parse_from_rfc3339(value)
+//         //             .map(|fixed| DateTime::::from(fixed))
+//         //             .map_err(serde::de::Error::custom)?;
+
+//         //         DateHour::try_from(datetime).map_err(serde::de::Error::custom)?
+//         //     }
+//         //     // return early with None
+//         //     None => return Ok(None),
+//         // },
+//     };
+
+//     Ok(Some(datehour))
+// }
+
 #[derive(Debug, Serialize, Clone, PartialEq, Eq)]
 pub struct Time {
-    // pub struct Time {
-    //     #[serde(default = "default_timeframe")]
+    /// Default: [`Timeframe::Day`].
     pub timeframe: Timeframe,
-    /// The default value used will be [`DateHour::now`] - [`AnalyticsQuery::timeframe`]
+    /// Default value: [`DateHour::now`] - `self.timeframe`
     /// For this query parameter you can use either:
    /// - a string with RFC 3339 and ISO 8601 format (see [`chrono::DateTime::parse_from_rfc3339`])
     /// - a timestamp in milliseconds
     /// **Note:** [`DateHour`] rules should be uphold, this means that passed values should always be rounded to hours
-    /// the should not contain **minutes**, **seconds** or **nanoseconds**
-    // TODO: When deserializing AnalyticsQuery, take timeframe & timezone into account and impl Default value
+    /// And it should not contain **minutes**, **seconds** or **nanoseconds**
+    // TODO: Either Timestamp (number) or DateTime (string) de/serialization
     pub start: DateHour,
-    // #[serde(default, deserialize_with = "deserialize_query_time")]
+    /// End DateHour should be after Start DateHour!
     pub end: Option>,
     // we can use `chrono_tz` to support more Timezones when needed.
     // #[serde(default = "default_timezone_utc")]
@@ -133,6 +159,17 @@ mod de {
         let timeframe = timeframe.unwrap_or(Timeframe::Day);
 
         let start = start.unwrap_or_else(|| DateHour::now() - &timeframe);
+
+        // if there is an End time passed, check if End is > Start
+        match end {
+            Some(end) if start > end => {
+                return Err(de::Error::custom(
+                    "End time should be larger than the Start time",
+                ));
+            }
+            _ => {}
+        }
+
         Ok(Time {
             timeframe,
             start,
@@ -172,7 +209,91 @@ mod test {
             );
         }
 
-        // default values for some fields
-        {}
+        // `Start` default value and no `End`
+        {
+            let timeframe_only = json!({
+                "timeframe": "week",
+            });
+
+            let time = from_value::