
Commit b6344f3

Merge pull request #461 from AmbireTech/analytics-routes
Analytics routes
2 parents: f0f08d4 + b647999

20 files changed, +2835 -556 lines changed

Cargo.lock

+43-8 (generated file; diff not rendered)

Cargo.toml

+3
@@ -8,3 +8,6 @@ members = [
     "sentry",
     "test_harness",
 ]
+
+[patch.crates-io]
+postgres-types = { git = "https://github.com/elpiel/rust-postgres", branch = "boxed-dyn-ToSql"}
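The `[patch.crates-io]` entry redirects every `postgres-types` dependency in the workspace to the fork's `boxed-dyn-ToSql` branch. Judging by the branch name and the `AnalyticsQuery::get_key` change later in this diff, the patch appears to be what allows owned `Box<dyn ToSql + Sync + Send>` values to be used as query parameters. A minimal sketch of that usage pattern, not taken from this commit (`as_params` is an illustrative name):

```rust
use tokio_postgres::types::ToSql;

/// Re-borrow owned, boxed parameters into the `&[&(dyn ToSql + Sync)]` shape
/// accepted by `tokio_postgres::Client::query` and friends.
fn as_params(owned: &[Box<dyn ToSql + Sync + Send>]) -> Vec<&(dyn ToSql + Sync)> {
    owned
        .iter()
        .map(|param| {
            // Coerce `&(dyn ToSql + Sync + Send)` down to `&(dyn ToSql + Sync)`.
            let param: &(dyn ToSql + Sync) = param.as_ref();
            param
        })
        .collect()
}
```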

docs/config/dev.toml

+1
@@ -36,6 +36,7 @@ ethereum_network = 'http://localhost:8545'
 
 creators_whitelist = []
 validators_whitelist = []
+admins = ['0xce07CbB7e054514D590a0262C93070D838bFBA2e']
 
 [[token_address_whitelist]]
 # DAI

docs/config/ganache.toml

+1-1
@@ -38,7 +38,7 @@ ethereum_network = 'http://localhost:8545'
 
 creators_whitelist = []
 validators_whitelist = []
-
+admins = ['0x80690751969B234697e9059e04ed72195c3507fa']
 
 [[token_address_whitelist]]
 # Mocked TOKEN

docs/config/prod.toml

+1-1
@@ -37,7 +37,7 @@ ethereum_network = 'http://localhost:8545'
 
 creators_whitelist = []
 validators_whitelist = []
-
+admins = ['0x5d6A3F1AD7b124ecDFDf4841D9bB246eD5fBF04c']
 
 [[token_address_whitelist]]
 # DAI

primitives/Cargo.toml

+1
@@ -5,6 +5,7 @@ authors = [
     "Ambire <[email protected]>",
     "Lachezar Lechev <[email protected]>",
     "Omidiora Samuel <[email protected]>",
+    "Simeon Nakov <[email protected]>",
 ]
 edition = "2021"
 license = "AGPL-3.0"

primitives/src/analytics.rs

+132-74
@@ -1,40 +1,43 @@
-use crate::{ChannelId, DomainError};
+use crate::{
+    sentry::{EventType, IMPRESSION},
+    Address, CampaignId, ValidatorId, IPFS,
+};
 use parse_display::Display;
 use serde::{Deserialize, Serialize};
 
-pub const ANALYTICS_QUERY_LIMIT: u32 = 200;
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct AnalyticsData {
-    pub time: f64,
-    pub value: String,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub channel_id: Option<ChannelId>,
-}
+use self::query::{AllowedKey, Time};
 
-#[derive(Debug, Serialize, Deserialize)]
-pub struct AnalyticsResponse {
-    pub aggr: Vec<AnalyticsData>,
-    pub limit: u32,
-}
+pub const ANALYTICS_QUERY_LIMIT: u32 = 200;
 
 #[cfg(feature = "postgres")]
 pub mod postgres {
-    use super::{AnalyticsData, OperatingSystem};
+    use super::{query::AllowedKey, AnalyticsQuery, OperatingSystem};
     use bytes::BytesMut;
     use std::error::Error;
-    use tokio_postgres::{
-        types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type},
-        Row,
-    };
-
-    impl From<&Row> for AnalyticsData {
-        fn from(row: &Row) -> Self {
-            Self {
-                time: row.get("time"),
-                value: row.get("value"),
-                channel_id: row.try_get("channel_id").ok(),
+    use tokio_postgres::types::{accepts, to_sql_checked, FromSql, IsNull, ToSql, Type};
+
+    impl AnalyticsQuery {
+        pub fn get_key(&self, key: AllowedKey) -> Option<Box<dyn ToSql + Sync + Send>> {
+            match key {
+                AllowedKey::CampaignId => self
+                    .campaign_id
+                    .map(|campaign_id| Box::new(campaign_id) as _),
+                AllowedKey::AdUnit => self.ad_unit.map(|ad_unit| Box::new(ad_unit) as _),
+                AllowedKey::AdSlot => self.ad_slot.map(|ad_slot| Box::new(ad_slot) as _),
+                AllowedKey::AdSlotType => self
+                    .ad_slot_type
+                    .clone()
+                    .map(|ad_slot_type| Box::new(ad_slot_type) as _),
+                AllowedKey::Advertiser => {
+                    self.advertiser.map(|advertiser| Box::new(advertiser) as _)
+                }
+                AllowedKey::Publisher => self.publisher.map(|publisher| Box::new(publisher) as _),
+                AllowedKey::Hostname => self
+                    .hostname
+                    .clone()
+                    .map(|hostname| Box::new(hostname) as _),
+                AllowedKey::Country => self.country.clone().map(|country| Box::new(country) as _),
+                AllowedKey::OsName => self.os_name.clone().map(|os_name| Box::new(os_name) as _),
             }
         }
     }
@@ -66,20 +69,54 @@ pub mod postgres {
         accepts!(TEXT, VARCHAR);
         to_sql_checked!();
     }
+
+    impl ToSql for AllowedKey {
+        fn to_sql(
+            &self,
+            ty: &Type,
+            w: &mut BytesMut,
+        ) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
+            self.to_string().to_sql(ty, w)
+        }
+
+        accepts!(TEXT, VARCHAR);
+        to_sql_checked!();
+    }
+
+    impl<'a> FromSql<'a> for AllowedKey {
+        fn from_sql(ty: &Type, raw: &'a [u8]) -> Result<Self, Box<dyn Error + Sync + Send>> {
+            let allowed_key_str = <&'a str as FromSql>::from_sql(ty, raw)?;
+
+            Ok(allowed_key_str.parse()?)
+        }
+
+        accepts!(TEXT, VARCHAR);
+    }
 }
 
-#[derive(Debug, Deserialize)]
+pub mod query;
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct AnalyticsQuery {
     #[serde(default = "default_limit")]
     pub limit: u32,
     #[serde(default = "default_event_type")]
-    pub event_type: String,
+    pub event_type: EventType,
     #[serde(default = "default_metric")]
-    pub metric: String,
-    #[serde(default = "default_timeframe")]
-    pub timeframe: String,
-    pub segment_by_channel: Option<String>,
+    pub metric: Metric,
+    pub segment_by: Option<AllowedKey>,
+    #[serde(flatten)]
+    pub time: Time,
+    pub campaign_id: Option<CampaignId>,
+    pub ad_unit: Option<IPFS>,
+    pub ad_slot: Option<IPFS>,
+    pub ad_slot_type: Option<String>,
+    pub advertiser: Option<Address>,
+    pub publisher: Option<Address>,
+    pub hostname: Option<String>,
+    pub country: Option<String>,
+    pub os_name: Option<OperatingSystem>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Display, Hash, Eq)]
@@ -91,6 +128,63 @@ pub enum OperatingSystem {
     Other,
 }
 
+#[derive(Debug, Clone, Serialize, Deserialize, Display, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub enum Timeframe {
+    /// [`Timeframe::Year`] returns analytics grouped by month.
+    Year,
+    /// [`Timeframe::Month`] returns analytics grouped by day.
+    Month,
+    /// [`Timeframe::Week`] returns analytics grouped by hour.
+    /// Same as [`Timeframe::Day`].
+    Week,
+    /// [`Timeframe::Day`] returns analytics grouped by hour.
+    /// Same as [`Timeframe::Week`].
+    Day,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, Display)]
+#[serde(rename_all = "camelCase")]
+pub enum Metric {
+    Count,
+    Paid,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, Display)]
+pub enum AuthenticateAs {
+    #[display("{0}")]
+    Advertiser(ValidatorId),
+    #[display("{0}")]
+    Publisher(ValidatorId),
+}
+
+impl Metric {
+    #[cfg(feature = "postgres")]
+    /// Returns the query column name of the [`Metric`].
+    ///
+    /// Available only when the `postgres` feature is enabled.
+    pub fn column_name(self) -> &'static str {
+        match self {
+            Metric::Count => "payout_count",
+            Metric::Paid => "payout_amount",
+        }
+    }
+}
+
+impl Timeframe {
+    pub fn to_hours(&self) -> i64 {
+        let hour = 1;
+        let day = 24 * hour;
+        let year = 365 * day;
+        match self {
+            Timeframe::Day => day,
+            Timeframe::Week => 7 * day,
+            Timeframe::Month => year / 12,
+            Timeframe::Year => year,
+        }
+    }
+}
+
 impl Default for OperatingSystem {
     fn default() -> Self {
         Self::Other
@@ -171,52 +265,16 @@ impl OperatingSystem {
     }
 }
 
-impl AnalyticsQuery {
-    pub fn is_valid(&self) -> Result<(), DomainError> {
-        let valid_event_types = ["IMPRESSION", "CLICK"];
-        let valid_metric = ["eventPayouts", "eventCounts"];
-        let valid_timeframe = ["year", "month", "week", "day", "hour"];
-
-        if !valid_event_types.contains(&self.event_type.as_str()) {
-            Err(DomainError::InvalidArgument(format!(
-                "invalid event_type, possible values are: {}",
-                valid_event_types.join(" ,")
-            )))
-        } else if !valid_metric.contains(&self.metric.as_str()) {
-            Err(DomainError::InvalidArgument(format!(
-                "invalid metric, possible values are: {}",
-                valid_metric.join(" ,")
-            )))
-        } else if !valid_timeframe.contains(&self.timeframe.as_str()) {
-            Err(DomainError::InvalidArgument(format!(
-                "invalid timeframe, possible values are: {}",
-                valid_timeframe.join(" ,")
-            )))
-        } else if self.limit > ANALYTICS_QUERY_LIMIT {
-            Err(DomainError::InvalidArgument(format!(
-                "invalid limit {}, maximum value 200",
-                self.limit
-            )))
-        } else {
-            Ok(())
-        }
-    }
-}
-
 fn default_limit() -> u32 {
     100
 }
 
-fn default_event_type() -> String {
-    "IMPRESSION".into()
-}
-
-fn default_metric() -> String {
-    "eventCounts".into()
+fn default_event_type() -> EventType {
+    IMPRESSION
 }
 
-fn default_timeframe() -> String {
-    "hour".into()
+fn default_metric() -> Metric {
+    Metric::Count
 }
 
 #[cfg(test)]
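For orientation only (the route and DB code of this PR are not shown in this excerpt): the boxed values returned by the new `AnalyticsQuery::get_key` lend themselves to building a dynamic `WHERE` clause with one condition per filter that was actually set. A hypothetical sketch, assuming the `postgres` feature of `primitives` is enabled and that columns are named after each key's `Display` form; `build_filters` is an illustrative name:

```rust
use primitives::analytics::{query::AllowedKey, AnalyticsQuery};
use tokio_postgres::types::ToSql;

/// Turn the filters that are set on `query` into `$n` placeholders plus the
/// matching parameter values, skipping filters that were left as `None`.
fn build_filters(
    query: &AnalyticsQuery,
    keys: Vec<AllowedKey>,
) -> (Vec<String>, Vec<Box<dyn ToSql + Sync + Send>>) {
    let mut conditions = Vec::new();
    let mut params: Vec<Box<dyn ToSql + Sync + Send>> = Vec::new();

    for key in keys {
        // Assumption: the SQL column shares the key's `Display` form.
        let column = key.to_string();

        // `get_key` returns `None` when the filter is not set on the query,
        // so unset filters simply never appear in the WHERE clause.
        if let Some(param) = query.get_key(key) {
            params.push(param);
            conditions.push(format!("{} = ${}", column, params.len()));
        }
    }

    (conditions, params)
}
```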
