
Commit

ref: cargo fmt
mitsuhiko committed Mar 25, 2018
1 parent 1c28a66 commit 437023c
Showing 6 changed files with 270 additions and 207 deletions.
2 changes: 1 addition & 1 deletion src/dsn.rs
@@ -4,7 +4,7 @@ use std::str::FromStr;
 use url::Url;

 use project_id::{ProjectId, ProjectIdParseError};
-use auth::{Auth, auth_from_dsn_and_client};
+use auth::{auth_from_dsn_and_client, Auth};

 /// Represents a dsn url parsing error.
 #[derive(Debug, Fail)]
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -40,14 +40,14 @@ extern crate chrono;
 extern crate failure;
 #[macro_use]
 extern crate failure_derive;
+extern crate linked_hash_map;
 extern crate serde;
 #[macro_use]
 extern crate serde_derive;
 extern crate serde_json;
 extern crate url;
 extern crate url_serde;
 extern crate uuid;
-extern crate linked_hash_map;

 #[macro_use]
 mod macros;
28 changes: 14 additions & 14 deletions src/protocol/v7.rs
@@ -19,17 +19,16 @@ use utils::ts_seconds_float;

 /// An arbitrary (JSON) value (`serde_json::value::Value`)
 pub mod value {
-    pub use serde_json::value::{Map, Value, Index, Number, from_value, to_value};
+    pub use serde_json::value::{from_value, to_value, Index, Map, Number, Value};
 }

 /// The internally use arbitrary data map type (`linked_hash_map::LinkedHashMap`)
 ///
 /// It is currently backed by the `linked-hash-map` crate's hash map so that
 /// insertion order is preserved.
 pub mod map {
-    pub use linked_hash_map::{Entries, IntoIter, Iter, IterMut, Keys,
-                              LinkedHashMap, OccupiedEntry,
-                              VacantEntry, Values};
+    pub use linked_hash_map::{Entries, IntoIter, Iter, IterMut, Keys, LinkedHashMap,
+                              OccupiedEntry, VacantEntry, Values};
 }

 /// An arbitrary (JSON) value (`serde_json::value::Value`)
@@ -308,7 +307,6 @@ pub struct Thread {
     pub current: bool,
 }

-
 /// Represents a single exception
 #[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq)]
 pub struct Exception {
@@ -1061,16 +1059,18 @@ impl<'de> Deserialize<'de> for Addr {
             Uint(u64),
         }

-        Ok(Addr(match Repr::deserialize(deserializer).map_err(D::Error::custom)? {
-            Repr::Str(s) => {
-                if s.len() > 2 && (&s[..2] == "0x" || &s[..2] == "0X") {
-                    u64::from_str_radix(&s[2..], 16).map_err(D::Error::custom)?
-                } else {
-                    u64::from_str_radix(&s, 10).map_err(D::Error::custom)?
+        Ok(Addr(
+            match Repr::deserialize(deserializer).map_err(D::Error::custom)? {
+                Repr::Str(s) => {
+                    if s.len() > 2 && (&s[..2] == "0x" || &s[..2] == "0X") {
+                        u64::from_str_radix(&s[2..], 16).map_err(D::Error::custom)?
+                    } else {
+                        u64::from_str_radix(&s, 10).map_err(D::Error::custom)?
+                    }
                 }
-            }
-            Repr::Uint(val) => val
-        }))
+                Repr::Uint(val) => val,
+            },
+        ))
     }
 }

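
For orientation, the expression being rewrapped above turns a deserialized value into an Addr: a string with a "0x" or "0X" prefix is parsed as hexadecimal, any other string as decimal, and a plain integer is taken as-is. Below is a standalone sketch of just the string-parsing branch, using a hypothetical parse_addr helper that is not part of sentry-types.

// Illustrative helper only; mirrors the logic inside `Addr`'s Deserialize impl.
fn parse_addr(s: &str) -> Result<u64, std::num::ParseIntError> {
    if s.len() > 2 && (&s[..2] == "0x" || &s[..2] == "0X") {
        // Hex form: strip the prefix and parse the remainder base-16.
        u64::from_str_radix(&s[2..], 16)
    } else {
        // Otherwise treat the whole string as a decimal number.
        u64::from_str_radix(s, 10)
    }
}

fn main() {
    assert_eq!(parse_addr("0x1f").unwrap(), 31);
    assert_eq!(parse_addr("42").unwrap(), 42);
    assert!(parse_addr("not an address").is_err());
}
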
33 changes: 17 additions & 16 deletions src/utils.rs
@@ -1,12 +1,11 @@
-use chrono::{DateTime, Utc, TimeZone};
+use chrono::{DateTime, TimeZone, Utc};

 /// Converts a datetime object into a float timestamp.
 pub fn datetime_to_timestamp(dt: &DateTime<Utc>) -> f64 {
     if dt.timestamp_subsec_nanos() == 0 {
         dt.timestamp() as f64
     } else {
-        (dt.timestamp() as f64) +
-            ((dt.timestamp_subsec_micros() as f64) / 1_000_000f64)
+        (dt.timestamp() as f64) + ((dt.timestamp_subsec_micros() as f64) / 1_000_000f64)
     }
 }

@@ -16,30 +15,30 @@ pub fn timestamp_to_datetime(ts: f64) -> DateTime<Utc> {
     Utc.timestamp_opt(secs, micros * 1000).unwrap()
 }

-
 pub mod ts_seconds_float {
     use std::fmt;
-    use serde::{ser, de};
-    use chrono::{DateTime, Utc, TimeZone};
+    use serde::{de, ser};
+    use chrono::{DateTime, TimeZone, Utc};

     use super::timestamp_to_datetime;

     pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
-        where D: de::Deserializer<'de>
+    where
+        D: de::Deserializer<'de>,
     {
         Ok(d.deserialize_any(SecondsTimestampVisitor)
-           .map(|dt| dt.with_timezone(&Utc))?)
+            .map(|dt| dt.with_timezone(&Utc))?)
     }

     pub fn serialize<S>(dt: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
-        where S: ser::Serializer
+    where
+        S: ser::Serializer,
     {
         if dt.timestamp_subsec_nanos() == 0 {
             serializer.serialize_i64(dt.timestamp())
         } else {
             serializer.serialize_f64(
-                (dt.timestamp() as f64) +
-                    ((dt.timestamp_subsec_micros() as f64) / 1_000_000f64)
+                (dt.timestamp() as f64) + ((dt.timestamp_subsec_micros() as f64) / 1_000_000f64),
             )
         }
     }
@@ -49,25 +48,27 @@ pub mod ts_seconds_float {
     impl<'de> de::Visitor<'de> for SecondsTimestampVisitor {
         type Value = DateTime<Utc>;

-        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
-        {
+        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
             write!(formatter, "a unix timestamp")
         }

         fn visit_f64<E>(self, value: f64) -> Result<DateTime<Utc>, E>
-            where E: de::Error
+        where
+            E: de::Error,
         {
             Ok(timestamp_to_datetime(value))
         }

         fn visit_i64<E>(self, value: i64) -> Result<DateTime<Utc>, E>
-            where E: de::Error
+        where
+            E: de::Error,
        {
             Ok(Utc.timestamp_opt(value, 0).unwrap())
         }

         fn visit_u64<E>(self, value: u64) -> Result<DateTime<Utc>, E>
-            where E: de::Error
+        where
+            E: de::Error,
         {
             Ok(Utc.timestamp_opt(value as i64, 0).unwrap())
         }
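
As an aside on the helpers reformatted above: datetime_to_timestamp encodes a DateTime<Utc> as float seconds, keeping whole-second values integral and carrying sub-second precision as microseconds divided by 1e6, and the ts_seconds_float serializer follows the same convention. A small self-contained sketch of that convention; the function body mirrors the diff, while the values in main are illustrative only.

extern crate chrono;

use chrono::{DateTime, TimeZone, Utc};

// Mirrors the `datetime_to_timestamp` shown in the diff above (not imported
// from the crate): whole seconds stay integral, fractions use microseconds.
fn datetime_to_timestamp(dt: &DateTime<Utc>) -> f64 {
    if dt.timestamp_subsec_nanos() == 0 {
        dt.timestamp() as f64
    } else {
        (dt.timestamp() as f64) + ((dt.timestamp_subsec_micros() as f64) / 1_000_000f64)
    }
}

fn main() {
    // 2012-02-01 00:14:46.500 UTC; the 500 ms surface as the .5 fraction.
    let dt = Utc.ymd(2012, 2, 1).and_hms_milli(0, 14, 46, 500);
    assert_eq!(datetime_to_timestamp(&dt), 1_328_055_286.5);
}
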
8 changes: 5 additions & 3 deletions tests/test_auth.rs
@@ -1,8 +1,7 @@
 extern crate chrono;
 extern crate sentry_types;
 use chrono::{TimeZone, Utc};
-use sentry_types::{Auth, Dsn, protocol};
-
+use sentry_types::{protocol, Auth, Dsn};

 #[test]
 fn test_auth_parsing() {
@@ -13,7 +12,10 @@ fn test_auth_parsing() {
         sentry_secret=secret"
         .parse()
         .unwrap();
-    assert_eq!(auth.timestamp(), Some(Utc.ymd(2012, 2, 1).and_hms_milli(0, 14, 46, 500)));
+    assert_eq!(
+        auth.timestamp(),
+        Some(Utc.ymd(2012, 2, 1).and_hms_milli(0, 14, 46, 500))
+    );
     assert_eq!(auth.client_agent(), Some("raven-python/42"));
     assert_eq!(auth.version(), 6);
     assert_eq!(auth.public_key(), "public");
