From 1d26df28bc5e1db359272b40adae70bfba9b7360 Mon Sep 17 00:00:00 2001 From: harsh-sharma-juspay <125131007+harsh-sharma-juspay@users.noreply.github.com> Date: Fri, 5 Jan 2024 16:01:56 +0530 Subject: [PATCH 1/3] feat(analytics): adding outgoing webhooks kafka event (#3140) Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com> --- config/config.example.toml | 13 ++- config/development.toml | 1 + config/docker_compose.toml | 1 + .../scripts/outgoing_webhook_events.sql | 109 +++++++++++++++++ crates/router/src/core/errors.rs | 2 +- crates/router/src/core/webhooks.rs | 37 +++++- crates/router/src/events.rs | 2 + .../src/events/outgoing_webhook_logs.rs | 110 ++++++++++++++++++ crates/router/src/services/kafka.rs | 4 + 9 files changed, 268 insertions(+), 11 deletions(-) create mode 100644 crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql create mode 100644 crates/router/src/events/outgoing_webhook_logs.rs diff --git a/config/config.example.toml b/config/config.example.toml index f9ae71d3b9ee..21a3ba6de93c 100644 --- a/config/config.example.toml +++ b/config/config.example.toml @@ -524,9 +524,10 @@ enabled = true # Switch to enable or disable PayPal onboarding source = "logs" # The event sink to push events supports kafka or logs (stdout) [events.kafka] -brokers = [] # Kafka broker urls for bootstrapping the client -intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events -attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events -refund_analytics_topic = "topic" # Kafka topic to be used for Refund events -api_logs_topic = "topic" # Kafka topic to be used for incoming api events -connector_logs_topic = "topic" # Kafka topic to be used for connector api events \ No newline at end of file +brokers = [] # Kafka broker urls for bootstrapping the client +intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events +attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events +refund_analytics_topic = "topic" # Kafka topic to be used for Refund events +api_logs_topic = "topic" # Kafka topic to be used for incoming api events +connector_logs_topic = "topic" # Kafka topic to be used for connector api events +outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events diff --git a/config/development.toml b/config/development.toml index 6e7e040906a5..80d594b248bd 100644 --- a/config/development.toml +++ b/config/development.toml @@ -519,6 +519,7 @@ attempt_analytics_topic = "hyperswitch-payment-attempt-events" refund_analytics_topic = "hyperswitch-refund-events" api_logs_topic = "hyperswitch-api-log-events" connector_logs_topic = "hyperswitch-connector-api-events" +outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events" [analytics] source = "sqlx" diff --git a/config/docker_compose.toml b/config/docker_compose.toml index 63be7339c7bc..1d3c845aa54f 100644 --- a/config/docker_compose.toml +++ b/config/docker_compose.toml @@ -366,6 +366,7 @@ attempt_analytics_topic = "hyperswitch-payment-attempt-events" refund_analytics_topic = "hyperswitch-refund-events" api_logs_topic = "hyperswitch-api-log-events" connector_logs_topic = "hyperswitch-connector-api-events" +outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events" [analytics] source = "sqlx" diff --git a/crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql b/crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql new file 
mode 100644
index 000000000000..3dc907629d0a
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql
@@ -0,0 +1,109 @@
+CREATE TABLE
+    outgoing_webhook_events_queue (
+        `merchant_id` String,
+        `event_id` Nullable(String),
+        `event_type` LowCardinality(String),
+        `outgoing_webhook_event_type` LowCardinality(String),
+        `payment_id` Nullable(String),
+        `refund_id` Nullable(String),
+        `attempt_id` Nullable(String),
+        `dispute_id` Nullable(String),
+        `payment_method_id` Nullable(String),
+        `mandate_id` Nullable(String),
+        `content` Nullable(String),
+        `is_error` Bool,
+        `error` Nullable(String),
+        `created_at_timestamp` DateTime64(3)
+    ) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+    kafka_topic_list = 'hyperswitch-outgoing-webhook-events',
+    kafka_group_name = 'hyper-c1',
+    kafka_format = 'JSONEachRow',
+    kafka_handle_error_mode = 'stream';
+
+CREATE TABLE
+    outgoing_webhook_events_cluster (
+        `merchant_id` String,
+        `event_id` String,
+        `event_type` LowCardinality(String),
+        `outgoing_webhook_event_type` LowCardinality(String),
+        `payment_id` Nullable(String),
+        `refund_id` Nullable(String),
+        `attempt_id` Nullable(String),
+        `dispute_id` Nullable(String),
+        `payment_method_id` Nullable(String),
+        `mandate_id` Nullable(String),
+        `content` Nullable(String),
+        `is_error` Bool,
+        `error` Nullable(String),
+        `created_at_timestamp` DateTime64(3),
+        `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+        INDEX eventIndex event_type TYPE bloom_filter GRANULARITY 1,
+        INDEX webhookeventIndex outgoing_webhook_event_type TYPE bloom_filter GRANULARITY 1
+    ) ENGINE = MergeTree PARTITION BY toStartOfDay(created_at_timestamp)
+ORDER BY (
+        created_at_timestamp,
+        merchant_id,
+        event_id,
+        event_type,
+        outgoing_webhook_event_type
+    ) TTL inserted_at + toIntervalMonth(6);
+
+CREATE MATERIALIZED VIEW outgoing_webhook_events_mv TO outgoing_webhook_events_cluster (
+    `merchant_id` String,
+    `event_id` Nullable(String),
+    `event_type` LowCardinality(String),
+    `outgoing_webhook_event_type` LowCardinality(String),
+    `payment_id` Nullable(String),
+    `refund_id` Nullable(String),
+    `attempt_id` Nullable(String),
+    `dispute_id` Nullable(String),
+    `payment_method_id` Nullable(String),
+    `mandate_id` Nullable(String),
+    `content` Nullable(String),
+    `is_error` Bool,
+    `error` Nullable(String),
+    `created_at_timestamp` DateTime64(3),
+    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
+) AS
+SELECT
+    merchant_id,
+    event_id,
+    event_type,
+    outgoing_webhook_event_type,
+    payment_id,
+    refund_id,
+    attempt_id,
+    dispute_id,
+    payment_method_id,
+    mandate_id,
+    content,
+    is_error,
+    error,
+    created_at_timestamp,
+    now() AS inserted_at
+FROM
+    outgoing_webhook_events_queue
+WHERE length(_error) = 0;
+
+CREATE MATERIALIZED VIEW outgoing_webhook_parse_errors (
+    `topic` String,
+    `partition` Int64,
+    `offset` Int64,
+    `raw` String,
+    `error` String
+) ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+    _topic AS topic,
+    _partition AS partition,
+    _offset AS offset,
+    _raw_message AS raw,
+    _error AS error
+FROM
+    outgoing_webhook_events_queue
+WHERE length(_error) > 0;
\ No newline at end of file
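The script above only defines the ingestion pipeline; nothing in this patch reads the tables back. As a smoke test once they are deployed, a query along these lines (illustrative only, not part of the patch; countIf is a standard ClickHouse aggregate) summarizes webhook delivery failures per merchant over the last day:

-- Illustrative monitoring query against the cluster table created above.
SELECT
    merchant_id,
    outgoing_webhook_event_type,
    countIf(is_error) AS failed,
    count() AS total
FROM outgoing_webhook_events_cluster
WHERE created_at_timestamp >= now() - INTERVAL 1 DAY
GROUP BY merchant_id, outgoing_webhook_event_type
ORDER BY failed DESC;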
diff --git a/crates/router/src/core/errors.rs b/crates/router/src/core/errors.rs
index 054f4053504e..cbc4290f63bb 100644
--- a/crates/router/src/core/errors.rs
+++ b/crates/router/src/core/errors.rs
@@ -226,7 +226,7 @@ pub enum KmsError {
     Utf8DecodingFailed,
 }
 
-#[derive(Debug, thiserror::Error)]
+#[derive(Debug, thiserror::Error, serde::Serialize)]
 pub enum WebhooksFlowError {
     #[error("Merchant webhook config not found")]
     MerchantConfigNotFound,
diff --git a/crates/router/src/core/webhooks.rs b/crates/router/src/core/webhooks.rs
index 762ee19b6415..c7e7548f00bd 100644
--- a/crates/router/src/core/webhooks.rs
+++ b/crates/router/src/core/webhooks.rs
@@ -25,7 +25,10 @@ use crate::{
         payments, refunds,
     },
     db::StorageInterface,
-    events::api_logs::ApiEvent,
+    events::{
+        api_logs::ApiEvent,
+        outgoing_webhook_logs::{OutgoingWebhookEvent, OutgoingWebhookEventMetric},
+    },
     logger,
     routes::{app::AppStateInfo, lock_utils, metrics::request::add_attributes, AppState},
     services::{self, authentication as auth},
@@ -731,21 +734,47 @@ pub async fn create_event_and_trigger_outgoing_webhook<W: types::OutgoingWebhook
     let result =
         trigger_webhook_to_merchant::<W>(business_profile, outgoing_webhook, state).await;
 
     if let Err(e) = result {
+        error.replace(
+            serde_json::to_value(e.current_context())
+                .into_report()
+                .attach_printable("Failed to serialize json error response")
+                .change_context(errors::ApiErrorResponse::WebhookProcessingFailure)
+                .ok()
+                .into(),
+        );
         logger::error!(?e);
     }
+    let outgoing_webhook_event_type = content.get_outgoing_webhook_event_type();
+    let webhook_event = OutgoingWebhookEvent::new(
+        merchant_account.merchant_id.clone(),
+        event.event_id.clone(),
+        event_type,
+        outgoing_webhook_event_type,
+        error.is_some(),
+        error,
+    );
+    match webhook_event.clone().try_into() {
+        Ok(event) => {
+            state_clone.event_handler().log_event(event);
+        }
+        Err(err) => {
+            logger::error!(error=?err, event=?webhook_event, "Error Logging Outgoing Webhook Event");
+        }
+    }
     });
 }
diff --git a/crates/router/src/events.rs b/crates/router/src/events.rs
index 2dc9258e19df..0091de588f13 100644
--- a/crates/router/src/events.rs
+++ b/crates/router/src/events.rs
@@ -9,6 +9,7 @@ pub mod api_logs;
 pub mod connector_api_logs;
 pub mod event_logger;
 pub mod kafka_handler;
+pub mod outgoing_webhook_logs;
 
 pub(super) trait EventHandler: Sync + Send + dyn_clone::DynClone {
     fn log_event(&self, event: RawEvent);
@@ -31,6 +32,7 @@ pub enum EventType {
     Refund,
     ApiLogs,
     ConnectorApiLogs,
+    OutgoingWebhookLogs,
 }
 
 #[derive(Debug, Default, Deserialize, Clone)]
diff --git a/crates/router/src/events/outgoing_webhook_logs.rs b/crates/router/src/events/outgoing_webhook_logs.rs
new file mode 100644
index 000000000000..ebf36caf706e
--- /dev/null
+++ b/crates/router/src/events/outgoing_webhook_logs.rs
@@ -0,0 +1,110 @@
+use api_models::{enums::EventType as OutgoingWebhookEventType, webhooks::OutgoingWebhookContent};
+use serde::Serialize;
+use serde_json::Value;
+use time::OffsetDateTime;
+
+use super::{EventType, RawEvent};
+
+#[derive(Clone, Debug, PartialEq, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub struct OutgoingWebhookEvent {
+    merchant_id: String,
+    event_id: String,
+    event_type: OutgoingWebhookEventType,
+    #[serde(flatten)]
+    content: Option<OutgoingWebhookEventContent>,
+    is_error: bool,
+    error: Option<serde_json::Value>,
+    created_at_timestamp: i128,
+}
+
+#[derive(Clone, Debug, PartialEq, Serialize)]
+#[serde(tag = "outgoing_webhook_event_type", rename_all = "snake_case")]
+pub enum OutgoingWebhookEventContent {
+    Payment {
+        payment_id: Option<String>,
+        content: Value,
+    },
+    Refund {
+        payment_id: String,
+        refund_id: String,
+        content: Value,
+    },
+    Dispute {
+        payment_id: String,
+        attempt_id: String,
+        dispute_id: String,
+        content: Value,
+    },
+    Mandate {
+        payment_method_id: String,
+        mandate_id: String,
+        content: Value,
+    },
+}
+pub trait OutgoingWebhookEventMetric {
+    fn get_outgoing_webhook_event_type(&self) -> Option<OutgoingWebhookEventContent>;
+}
+impl OutgoingWebhookEventMetric for OutgoingWebhookContent {
+    fn get_outgoing_webhook_event_type(&self) -> Option<OutgoingWebhookEventContent> {
+        match self {
+            Self::PaymentDetails(payment_payload) => Some(OutgoingWebhookEventContent::Payment {
+                payment_id: payment_payload.payment_id.clone(),
+                content: masking::masked_serialize(&payment_payload)
+                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
+            }),
+            Self::RefundDetails(refund_payload) => Some(OutgoingWebhookEventContent::Refund {
+                payment_id: refund_payload.payment_id.clone(),
+                refund_id: refund_payload.refund_id.clone(),
+                content: masking::masked_serialize(&refund_payload)
+                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
+            }),
+            Self::DisputeDetails(dispute_payload) => Some(OutgoingWebhookEventContent::Dispute {
+                payment_id: dispute_payload.payment_id.clone(),
+                attempt_id: dispute_payload.attempt_id.clone(),
+                dispute_id: dispute_payload.dispute_id.clone(),
+                content: masking::masked_serialize(&dispute_payload)
+                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
+            }),
+            Self::MandateDetails(mandate_payload) => Some(OutgoingWebhookEventContent::Mandate {
+                payment_method_id: mandate_payload.payment_method_id.clone(),
+                mandate_id: mandate_payload.mandate_id.clone(),
+                content: masking::masked_serialize(&mandate_payload)
+                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
+            }),
+        }
+    }
+}
+
+impl OutgoingWebhookEvent {
+    pub fn new(
+        merchant_id: String,
+        event_id: String,
+        event_type: OutgoingWebhookEventType,
+        content: Option<OutgoingWebhookEventContent>,
+        is_error: bool,
+        error: Option<serde_json::Value>,
+    ) -> Self {
+        Self {
+            merchant_id,
+            event_id,
+            event_type,
+            content,
+            is_error,
+            error,
+            created_at_timestamp: OffsetDateTime::now_utc().unix_timestamp_nanos() / 1_000_000,
+        }
+    }
+}
+
+impl TryFrom<OutgoingWebhookEvent> for RawEvent {
+    type Error = serde_json::Error;
+
+    fn try_from(value: OutgoingWebhookEvent) -> Result<Self, Self::Error> {
+        Ok(Self {
+            event_type: EventType::OutgoingWebhookLogs,
+            key: value.merchant_id.clone(),
+            payload: serde_json::to_value(value)?,
+        })
+    }
+}
diff --git a/crates/router/src/services/kafka.rs b/crates/router/src/services/kafka.rs
index 4c65b4677872..5a6d7043e6d0 100644
--- a/crates/router/src/services/kafka.rs
+++ b/crates/router/src/services/kafka.rs
@@ -84,6 +84,7 @@ pub struct KafkaSettings {
     refund_analytics_topic: String,
     api_logs_topic: String,
     connector_logs_topic: String,
+    outgoing_webhook_logs_topic: String,
 }
 
 impl KafkaSettings {
@@ -140,6 +141,7 @@ pub struct KafkaProducer {
     refund_analytics_topic: String,
     api_logs_topic: String,
     connector_logs_topic: String,
+    outgoing_webhook_logs_topic: String,
 }
 
 struct RdKafkaProducer(ThreadedProducer<DefaultProducerContext>);
@@ -177,6 +179,7 @@ impl KafkaProducer {
             refund_analytics_topic: conf.refund_analytics_topic.clone(),
             api_logs_topic: conf.api_logs_topic.clone(),
             connector_logs_topic: conf.connector_logs_topic.clone(),
+            outgoing_webhook_logs_topic: conf.outgoing_webhook_logs_topic.clone(),
         })
     }
 
@@ -309,6 +312,7 @@ impl KafkaProducer {
             EventType::PaymentIntent => &self.intent_analytics_topic,
             EventType::Refund => &self.refund_analytics_topic,
             EventType::ConnectorApiLogs => &self.connector_logs_topic,
+            EventType::OutgoingWebhookLogs => &self.outgoing_webhook_logs_topic,
         }
     }
 }
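Taken together, patch 1 means each delivered (or failed) outgoing webhook produces one JSON row on the new Kafka topic. The sketch below is illustrative and not part of the patch — the router::events module path and all identifier values are assumptions — but it shows how OutgoingWebhookEvent::new composes that row and why the flattened serialization lines up with the ClickHouse columns:

use api_models::enums::EventType as OutgoingWebhookEventType;
use router::events::outgoing_webhook_logs::{OutgoingWebhookEvent, OutgoingWebhookEventContent};

fn main() {
    // Illustrative values only; in the patch this is built inside
    // create_event_and_trigger_outgoing_webhook after delivery is attempted.
    let event = OutgoingWebhookEvent::new(
        "merchant_abc".to_string(),
        "evt_123".to_string(),
        OutgoingWebhookEventType::PaymentSucceeded,
        Some(OutgoingWebhookEventContent::Payment {
            payment_id: Some("pay_123".to_string()),
            content: serde_json::json!({"status": "succeeded"}),
        }),
        false,
        None,
    );
    // Because of #[serde(flatten)] and the internally tagged enum, the variant
    // tag lands at the top level, matching the ClickHouse schema:
    // {"merchant_id":"merchant_abc","event_id":"evt_123",
    //  "event_type":"payment_succeeded","outgoing_webhook_event_type":"payment",
    //  "payment_id":"pay_123","content":{"status":"succeeded"},
    //  "is_error":false,"error":null,"created_at_timestamp":...}
    println!("{}", serde_json::to_string(&event).unwrap());
}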
From f30ba89884d3abf2356cf1870d833a97d2411f69 Mon Sep 17 00:00:00 2001
From: Chethan Rao <70657455+Chethan-rao@users.noreply.github.com>
Date: Fri, 5 Jan 2024 16:21:40 +0530
Subject: [PATCH 2/3] feat: add deep health check (#3210)

Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com>
---
 crates/api_models/src/health_check.rs    |   6 +
 crates/api_models/src/lib.rs             |   1 +
 crates/router/src/consts.rs              |   2 +
 crates/router/src/db.rs                  |   2 +
 crates/router/src/db/health_check.rs     | 147 +++++++++++++++++++++++
 crates/router/src/db/kafka_store.rs      |  23 ++++
 crates/router/src/routes/app.rs          |   5 +-
 crates/router/src/routes/health.rs       |  62 +++++++++-
 crates/router/src/services/api.rs        |   8 ++
 crates/router/src/services/api/client.rs |   2 +
 crates/storage_impl/src/errors.rs        |  50 ++++++++
 11 files changed, 304 insertions(+), 4 deletions(-)
 create mode 100644 crates/api_models/src/health_check.rs
 create mode 100644 crates/router/src/db/health_check.rs

diff --git a/crates/api_models/src/health_check.rs b/crates/api_models/src/health_check.rs
new file mode 100644
index 000000000000..d7bb120d0176
--- /dev/null
+++ b/crates/api_models/src/health_check.rs
@@ -0,0 +1,6 @@
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct RouterHealthCheckResponse {
+    pub database: String,
+    pub redis: String,
+    pub locker: String,
+}
diff --git a/crates/api_models/src/lib.rs b/crates/api_models/src/lib.rs
index 935944cf74c2..459443747e36 100644
--- a/crates/api_models/src/lib.rs
+++ b/crates/api_models/src/lib.rs
@@ -16,6 +16,7 @@ pub mod errors;
 pub mod events;
 pub mod files;
 pub mod gsm;
+pub mod health_check;
 pub mod locker_migration;
 pub mod mandates;
 pub mod organization;
diff --git a/crates/router/src/consts.rs b/crates/router/src/consts.rs
index 4a2d2831d103..eff42c0cd7c4 100644
--- a/crates/router/src/consts.rs
+++ b/crates/router/src/consts.rs
@@ -70,3 +70,5 @@ pub const EMAIL_TOKEN_TIME_IN_SECS: u64 = 60 * 60 * 24; // 1 day
 pub const VERIFY_CONNECTOR_ID_PREFIX: &str = "conn_verify";
 #[cfg(feature = "olap")]
 pub const VERIFY_CONNECTOR_MERCHANT_ID: &str = "test_merchant";
+
+pub const LOCKER_HEALTH_CALL_PATH: &str = "/health";
diff --git a/crates/router/src/db.rs b/crates/router/src/db.rs
index 0cd4cb218810..5beace9cbb83 100644
--- a/crates/router/src/db.rs
+++ b/crates/router/src/db.rs
@@ -14,6 +14,7 @@ pub mod events;
 pub mod file;
 pub mod fraud_check;
 pub mod gsm;
+pub mod health_check;
 mod kafka_store;
 pub mod locker_mock_up;
 pub mod mandate;
@@ -103,6 +104,7 @@ pub trait StorageInterface:
     + user_role::UserRoleInterface
     + authorization::AuthorizationInterface
     + user::sample_data::BatchSampleDataInterface
+    + health_check::HealthCheckInterface
     + 'static
 {
     fn get_scheduler_db(&self) -> Box<dyn scheduler::SchedulerInterface>;
diff --git a/crates/router/src/db/health_check.rs b/crates/router/src/db/health_check.rs
new file mode 100644
index 000000000000..73bc2a4321d7
--- /dev/null
+++ b/crates/router/src/db/health_check.rs
@@ -0,0 +1,147 @@
+use async_bb8_diesel::{AsyncConnection, AsyncRunQueryDsl};
+use diesel_models::ConfigNew;
+use error_stack::ResultExt;
+use router_env::logger;
+
+use super::{MockDb, StorageInterface, Store};
+use crate::{
+    connection,
+    consts::LOCKER_HEALTH_CALL_PATH,
+    core::errors::{self, CustomResult},
+    routes,
+    services::api as services,
+    types::storage,
+};
+
+#[async_trait::async_trait]
+pub trait HealthCheckInterface {
+    async fn health_check_db(&self) -> CustomResult<(), errors::HealthCheckDBError>;
+    async fn health_check_redis(
+        &self,
+        db: &dyn StorageInterface,
+    ) -> CustomResult<(), errors::HealthCheckRedisError>;
+    async fn health_check_locker(
+        &self,
+        state: &routes::AppState,
+    ) -> CustomResult<(), errors::HealthCheckLockerError>;
+}
+
+#[async_trait::async_trait]
+impl HealthCheckInterface for Store {
+    async fn health_check_db(&self) -> CustomResult<(), errors::HealthCheckDBError> {
+        let conn = connection::pg_connection_write(self)
+            .await
+            .change_context(errors::HealthCheckDBError::DBError)?;
+
+        let _data = conn
+            .transaction_async(|conn| {
+                Box::pin(async move {
+                    let query =
+                        diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>("1 + 1"));
+                    let _x: i32 = query.get_result_async(&conn).await.map_err(|err| {
+                        logger::error!(read_err=?err,"Error while reading element in the database");
+                        errors::HealthCheckDBError::DBReadError
+                    })?;
+
+                    logger::debug!("Database read was successful");
+
+                    let config = ConfigNew {
+                        key: "test_key".to_string(),
+                        config: "test_value".to_string(),
+                    };
+
+                    config.insert(&conn).await.map_err(|err| {
+                        logger::error!(write_err=?err,"Error while writing to database");
+                        errors::HealthCheckDBError::DBWriteError
+                    })?;
+
+                    logger::debug!("Database write was successful");
+
+                    storage::Config::delete_by_key(&conn, "test_key").await.map_err(|err| {
+                        logger::error!(delete_err=?err,"Error while deleting element in the database");
+                        errors::HealthCheckDBError::DBDeleteError
+                    })?;
+
+                    logger::debug!("Database delete was successful");
+
+                    Ok::<_, errors::HealthCheckDBError>(())
+                })
+            })
+            .await?;
+
+        Ok(())
+    }
+
+    async fn health_check_redis(
+        &self,
+        db: &dyn StorageInterface,
+    ) -> CustomResult<(), errors::HealthCheckRedisError> {
+        let redis_conn = db
+            .get_redis_conn()
+            .change_context(errors::HealthCheckRedisError::RedisConnectionError)?;
+
+        redis_conn
+            .serialize_and_set_key_with_expiry("test_key", "test_value", 30)
+            .await
+            .change_context(errors::HealthCheckRedisError::SetFailed)?;
+
+        logger::debug!("Redis set_key was successful");
+
+        redis_conn
+            .get_key("test_key")
+            .await
+            .change_context(errors::HealthCheckRedisError::GetFailed)?;
+
+        logger::debug!("Redis get_key was successful");
+
+        redis_conn
+            .delete_key("test_key")
+            .await
+            .change_context(errors::HealthCheckRedisError::DeleteFailed)?;
+
+        logger::debug!("Redis delete_key was successful");
+
+        Ok(())
+    }
+
+    async fn health_check_locker(
+        &self,
+        state: &routes::AppState,
+    ) -> CustomResult<(), errors::HealthCheckLockerError> {
+        let locker = &state.conf.locker;
+        if !locker.mock_locker {
+            let mut url = locker.host_rs.to_owned();
+            url.push_str(LOCKER_HEALTH_CALL_PATH);
+            let request = services::Request::new(services::Method::Get, &url);
+            services::call_connector_api(state, request)
+                .await
+                .change_context(errors::HealthCheckLockerError::FailedToCallLocker)?
+ .ok(); + } + + logger::debug!("Locker call was successful"); + + Ok(()) + } +} + +#[async_trait::async_trait] +impl HealthCheckInterface for MockDb { + async fn health_check_db(&self) -> CustomResult<(), errors::HealthCheckDBError> { + Ok(()) + } + + async fn health_check_redis( + &self, + _: &dyn StorageInterface, + ) -> CustomResult<(), errors::HealthCheckRedisError> { + Ok(()) + } + + async fn health_check_locker( + &self, + _: &routes::AppState, + ) -> CustomResult<(), errors::HealthCheckLockerError> { + Ok(()) + } +} diff --git a/crates/router/src/db/kafka_store.rs b/crates/router/src/db/kafka_store.rs index db94c1bcbca9..1184992a8f7c 100644 --- a/crates/router/src/db/kafka_store.rs +++ b/crates/router/src/db/kafka_store.rs @@ -43,6 +43,7 @@ use crate::{ events::EventInterface, file::FileMetadataInterface, gsm::GsmInterface, + health_check::HealthCheckInterface, locker_mock_up::LockerMockUpInterface, mandate::MandateInterface, merchant_account::MerchantAccountInterface, @@ -57,6 +58,7 @@ use crate::{ routing_algorithm::RoutingAlgorithmInterface, MasterKeyInterface, StorageInterface, }, + routes, services::{authentication, kafka::KafkaProducer, Store}, types::{ domain, @@ -2131,3 +2133,24 @@ impl AuthorizationInterface for KafkaStore { .await } } + +#[async_trait::async_trait] +impl HealthCheckInterface for KafkaStore { + async fn health_check_db(&self) -> CustomResult<(), errors::HealthCheckDBError> { + self.diesel_store.health_check_db().await + } + + async fn health_check_redis( + &self, + db: &dyn StorageInterface, + ) -> CustomResult<(), errors::HealthCheckRedisError> { + self.diesel_store.health_check_redis(db).await + } + + async fn health_check_locker( + &self, + state: &routes::AppState, + ) -> CustomResult<(), errors::HealthCheckLockerError> { + self.diesel_store.health_check_locker(state).await + } +} diff --git a/crates/router/src/routes/app.rs b/crates/router/src/routes/app.rs index 0357cedd443c..6625a206be21 100644 --- a/crates/router/src/routes/app.rs +++ b/crates/router/src/routes/app.rs @@ -253,9 +253,10 @@ pub struct Health; impl Health { pub fn server(state: AppState) -> Scope { - web::scope("") + web::scope("health") .app_data(web::Data::new(state)) - .service(web::resource("/health").route(web::get().to(health))) + .service(web::resource("").route(web::get().to(health))) + .service(web::resource("/deep_check").route(web::post().to(deep_health_check))) } } diff --git a/crates/router/src/routes/health.rs b/crates/router/src/routes/health.rs index 7c7f29bd1819..f07b744f7f52 100644 --- a/crates/router/src/routes/health.rs +++ b/crates/router/src/routes/health.rs @@ -1,7 +1,9 @@ +use actix_web::web; +use api_models::health_check::RouterHealthCheckResponse; use router_env::{instrument, logger, tracing}; -use crate::routes::metrics; - +use super::app; +use crate::{routes::metrics, services}; /// . 
// #[logger::instrument(skip_all, name = "name1", level = "warn", fields( key1 = "val1" ))]
 #[instrument(skip_all)]
@@ -11,3 +13,59 @@ pub async fn health() -> impl actix_web::Responder {
     logger::info!("Health was called");
     actix_web::HttpResponse::Ok().body("health is good")
 }
+
+#[instrument(skip_all)]
+pub async fn deep_health_check(state: web::Data<app::AppState>) -> impl actix_web::Responder {
+    metrics::HEALTH_METRIC.add(&metrics::CONTEXT, 1, &[]);
+    let db = &*state.store;
+    let mut status_code = 200;
+    logger::info!("Deep health check was called");
+
+    logger::debug!("Database health check begin");
+
+    let db_status = match db.health_check_db().await {
+        Ok(_) => "Health is good".to_string(),
+        Err(err) => {
+            status_code = 500;
+            err.to_string()
+        }
+    };
+    logger::debug!("Database health check end");
+
+    logger::debug!("Redis health check begin");
+
+    let redis_status = match db.health_check_redis(db).await {
+        Ok(_) => "Health is good".to_string(),
+        Err(err) => {
+            status_code = 500;
+            err.to_string()
+        }
+    };
+
+    logger::debug!("Redis health check end");
+
+    logger::debug!("Locker health check begin");
+
+    let locker_status = match db.health_check_locker(&state).await {
+        Ok(_) => "Health is good".to_string(),
+        Err(err) => {
+            status_code = 500;
+            err.to_string()
+        }
+    };
+
+    logger::debug!("Locker health check end");
+
+    let response = serde_json::to_string(&RouterHealthCheckResponse {
+        database: db_status,
+        redis: redis_status,
+        locker: locker_status,
+    })
+    .unwrap_or_default();
+
+    if status_code == 200 {
+        services::http_response_json(response)
+    } else {
+        services::http_server_error_json_response(response)
+    }
+}
diff --git a/crates/router/src/services/api.rs b/crates/router/src/services/api.rs
index 92fda578727c..71687e02c121 100644
--- a/crates/router/src/services/api.rs
+++ b/crates/router/src/services/api.rs
@@ -1138,6 +1138,14 @@ pub fn http_response_json<T: body::MessageBody + 'static>(response: T) -> HttpResponse {
         .body(response)
 }
 
+pub fn http_server_error_json_response<T: body::MessageBody + 'static>(
+    response: T,
+) -> HttpResponse {
+    HttpResponse::InternalServerError()
+        .content_type(mime::APPLICATION_JSON)
+        .body(response)
+}
+
 pub fn http_response_json_with_headers<T: body::MessageBody + 'static>(
     response: T,
     mut headers: Vec<(String, String)>,
diff --git a/crates/router/src/services/api/client.rs b/crates/router/src/services/api/client.rs
index cc7353dcda6b..fca85c41699a 100644
--- a/crates/router/src/services/api/client.rs
+++ b/crates/router/src/services/api/client.rs
@@ -10,6 +10,7 @@ use router_env::tracing_actix_web::RequestId;
 use super::{request::Maskable, Request};
 use crate::{
     configs::settings::{Locker, Proxy},
+    consts::LOCKER_HEALTH_CALL_PATH,
     core::{
         errors::{ApiClientError, CustomResult},
         payments,
@@ -119,6 +120,7 @@ pub fn proxy_bypass_urls(locker: &Locker) -> Vec<String> {
         format!("{locker_host_rs}/cards/add"),
         format!("{locker_host_rs}/cards/retrieve"),
         format!("{locker_host_rs}/cards/delete"),
+        format!("{locker_host_rs}{}", LOCKER_HEALTH_CALL_PATH),
         format!("{locker_host}/card/addCard"),
         format!("{locker_host}/card/getCard"),
         format!("{locker_host}/card/deleteCard"),
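To exercise the new endpoint against a locally running router (the base URL and port below are assumptions; adjust to your deployment), note that app.rs registers deep_check with web::post(), so the probe must be a POST even though the check is read-only. A sketch using reqwest:

// Illustrative probe for the new deep health check endpoint.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let resp = reqwest::Client::new()
        .post("http://localhost:8080/health/deep_check") // assumed local address
        .send()
        .await?;
    let status = resp.status(); // 200 if all checks pass, 500 otherwise
    let body = resp.text().await?;
    // e.g. {"database":"Health is good","redis":"Health is good","locker":"Health is good"}
    println!("{status} {body}");
    Ok(())
}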
diff --git a/crates/storage_impl/src/errors.rs b/crates/storage_impl/src/errors.rs
index f0cbebf78c55..50173bb1c739 100644
--- a/crates/storage_impl/src/errors.rs
+++ b/crates/storage_impl/src/errors.rs
@@ -376,3 +376,53 @@ pub enum ConnectorError {
     #[error("Missing 3DS redirection payload: {field_name}")]
     MissingConnectorRedirectionPayload { field_name: &'static str },
 }
+
+#[derive(Debug, thiserror::Error)]
+pub enum HealthCheckDBError {
+    #[error("Error while connecting to database")]
+    DBError,
+    #[error("Error while writing to database")]
+    DBWriteError,
+    #[error("Error while reading element in the database")]
+    DBReadError,
+    #[error("Error while deleting element in the database")]
+    DBDeleteError,
+    #[error("Unpredictable error occurred")]
+    UnknownError,
+    #[error("Error in database transaction")]
+    TransactionError,
+}
+
+impl From<diesel::result::Error> for HealthCheckDBError {
+    fn from(error: diesel::result::Error) -> Self {
+        match error {
+            diesel::result::Error::DatabaseError(_, _) => Self::DBError,
+
+            diesel::result::Error::RollbackErrorOnCommit { .. }
+            | diesel::result::Error::RollbackTransaction
+            | diesel::result::Error::AlreadyInTransaction
+            | diesel::result::Error::NotInTransaction
+            | diesel::result::Error::BrokenTransactionManager => Self::TransactionError,
+
+            _ => Self::UnknownError,
+        }
+    }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum HealthCheckRedisError {
+    #[error("Failed to establish Redis connection")]
+    RedisConnectionError,
+    #[error("Failed to set key value in Redis")]
+    SetFailed,
+    #[error("Failed to get key value in Redis")]
+    GetFailed,
+    #[error("Failed to delete key value in Redis")]
+    DeleteFailed,
+}
+
+#[derive(Debug, Clone, thiserror::Error)]
+pub enum HealthCheckLockerError {
+    #[error("Failed to establish Locker connection")]
+    FailedToCallLocker,
+}
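Because deep_health_check stringifies each failure with to_string(), these #[error(...)] messages are the exact values surfaced in the response JSON. A small illustration (assuming the enums and From impl exactly as defined above):

use storage_impl::errors::HealthCheckDBError;

fn main() {
    // The Display string is what ends up in the health check response body.
    assert_eq!(
        HealthCheckDBError::DBError.to_string(),
        "Error while connecting to database"
    );
    // Transaction-level diesel errors funnel into TransactionError via From.
    let err: HealthCheckDBError = diesel::result::Error::NotInTransaction.into();
    assert!(matches!(err, HealthCheckDBError::TransactionError));
}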
From c8279b110e6c55784f042aebb956931e1870b0ca Mon Sep 17 00:00:00 2001
From: Chethan Rao <70657455+Chethan-rao@users.noreply.github.com>
Date: Fri, 5 Jan 2024 16:22:31 +0530
Subject: [PATCH 3/3] chore: address Rust 1.75 clippy lints (#3231)

Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com>
---
 crates/common_utils/src/request.rs                   |   3 +-
 crates/kgraph_utils/src/mca.rs                       |   2 +-
 crates/masking/src/diesel.rs                         |   1 -
 crates/masking/src/serde.rs                          |   2 +-
 crates/redis_interface/src/lib.rs                    |   2 +-
 crates/router/src/configs/settings.rs                |   2 +-
 .../braintree_graphql_transformers.rs                |   4 +-
 crates/router/src/connector/checkout/transformers.rs |   5 +-
 .../src/connector/stripe/transformers.rs             |   2 +-
 crates/router/src/connector/utils.rs                 | 160 +++++++++---------
 crates/router/src/connector/wise.rs                  |   4 +-
 crates/router/src/core/fraud_check.rs                |  10 +-
 crates/router/src/core/payments.rs                   |   5 +-
 .../router/src/core/payments/transformers.rs         |   2 +-
 crates/router/src/db/dispute.rs                      |   4 +-
 crates/router/src/macros.rs                          |   5 +-
 crates/router/src/services/api.rs                    |  28 +--
 .../types/api/connector_onboarding/paypal.rs         |   2 +-
 crates/router/src/types/storage.rs                   |   1 -
 crates/router/src/types/storage/query.rs             |   1 -
 crates/router/src/utils.rs                           |  20 +--
 crates/router/src/utils/user/sample_data.rs          |   4 +-
 crates/router/tests/connectors/utils.rs              |   2 +-
 crates/scheduler/src/consumer.rs                     |   6 +
 crates/scheduler/src/producer.rs                     |   6 +
 25 files changed, 141 insertions(+), 142 deletions(-)
 delete mode 100644 crates/router/src/types/storage/query.rs

diff --git a/crates/common_utils/src/request.rs b/crates/common_utils/src/request.rs
index e3aecb4b5695..f063a1940bef 100644
--- a/crates/common_utils/src/request.rs
+++ b/crates/common_utils/src/request.rs
@@ -138,8 +138,7 @@ impl RequestBuilder {
     }
 
     pub fn headers(mut self, headers: Vec<(String, Maskable<String>)>) -> Self {
-        let mut h = headers.into_iter().map(|(h, v)| (h, v));
-        self.headers.extend(&mut h);
+        self.headers.extend(headers);
         self
     }
 
diff --git a/crates/kgraph_utils/src/mca.rs b/crates/kgraph_utils/src/mca.rs
index 0e224a8f3d9d..a04e052514d6 100644
--- a/crates/kgraph_utils/src/mca.rs
+++ b/crates/kgraph_utils/src/mca.rs
@@ -156,7 +156,7 @@ fn compile_request_pm_types(
     let or_node_neighbor_id = if amount_nodes.len() == 1 {
         amount_nodes
-            .get(0)
+            .first()
             .copied()
             .ok_or(KgraphError::IndexingError)?
     } else {
diff --git a/crates/masking/src/diesel.rs b/crates/masking/src/diesel.rs
index 307f083d27fb..f3576298bdb1 100644
--- a/crates/masking/src/diesel.rs
+++ b/crates/masking/src/diesel.rs
@@ -2,7 +2,6 @@
 //! Diesel-related.
 //!
 
-pub use diesel::Expression;
 use diesel::{
     backend::Backend,
     deserialize::{self, FromSql, Queryable},
diff --git a/crates/masking/src/serde.rs b/crates/masking/src/serde.rs
index 944392e693ff..bb81717fd670 100644
--- a/crates/masking/src/serde.rs
+++ b/crates/masking/src/serde.rs
@@ -3,7 +3,7 @@
 //!
 
 pub use erased_serde::Serialize as ErasedSerialize;
-pub use serde::{de, ser, Deserialize, Serialize, Serializer};
+pub use serde::{de, Deserialize, Serialize, Serializer};
 use serde_json::{value::Serializer as JsonValueSerializer, Value};
 
 use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret};
diff --git a/crates/redis_interface/src/lib.rs b/crates/redis_interface/src/lib.rs
index 7111869a5c03..33d40ebe155d 100644
--- a/crates/redis_interface/src/lib.rs
+++ b/crates/redis_interface/src/lib.rs
@@ -28,7 +28,7 @@ pub use fred::interfaces::PubsubInterface;
 use fred::{interfaces::ClientLike, prelude::EventInterface};
 use router_env::logger;
 
-pub use self::{commands::*, types::*};
+pub use self::types::*;
 
 pub struct RedisConnectionPool {
     pub pool: fred::prelude::RedisPool,
diff --git a/crates/router/src/configs/settings.rs b/crates/router/src/configs/settings.rs
index b62831950856..db59d7f29148 100644
--- a/crates/router/src/configs/settings.rs
+++ b/crates/router/src/configs/settings.rs
@@ -556,7 +556,7 @@ impl From<Database> for storage_impl::config::Database {
             dbname: val.dbname,
             pool_size: val.pool_size,
             connection_timeout: val.connection_timeout,
-            queue_strategy: val.queue_strategy.into(),
+            queue_strategy: val.queue_strategy,
             min_idle: val.min_idle,
             max_lifetime: val.max_lifetime,
         }
diff --git a/crates/router/src/connector/braintree/braintree_graphql_transformers.rs b/crates/router/src/connector/braintree/braintree_graphql_transformers.rs
index 9bdbd4392f7c..e0baf034f72a 100644
--- a/crates/router/src/connector/braintree/braintree_graphql_transformers.rs
+++ b/crates/router/src/connector/braintree/braintree_graphql_transformers.rs
@@ -297,11 +297,11 @@ fn build_error_response(
 
     get_error_response(
         response
-            .get(0)
+            .first()
             .and_then(|err_details| err_details.extensions.as_ref())
             .and_then(|extensions| extensions.legacy_code.clone()),
         response
-            .get(0)
+            .first()
             .map(|err_details| err_details.message.clone()),
         reason,
         http_code,
diff --git a/crates/router/src/connector/checkout/transformers.rs b/crates/router/src/connector/checkout/transformers.rs
index 5bd80a10c4b5..6e7656c38a6f 100644
--- a/crates/router/src/connector/checkout/transformers.rs
+++ b/crates/router/src/connector/checkout/transformers.rs
@@ -1026,10 +1026,7 @@ impl utils::MultipleCaptureSyncResponse for Box<PaymentsResponse> {
         self.status == CheckoutPaymentStatus::Captured
     }
     fn get_amount_captured(&self) -> Option<i64> {
-        match self.amount {
-            Some(amount) => amount.try_into().ok(),
-            None => None,
-        }
+        self.amount.map(Into::into)
     }
 }
 
diff --git a/crates/router/src/connector/stripe/transformers.rs b/crates/router/src/connector/stripe/transformers.rs
index b044331379a1..ba5dfc7fef91 100644
--- a/crates/router/src/connector/stripe/transformers.rs
+++ b/crates/router/src/connector/stripe/transformers.rs
@@ -2336,7 +2336,7 @@ pub fn get_connector_metadata(
     let next_action_response =
next_action .and_then(|next_action_response| match next_action_response { StripeNextActionResponse::DisplayBankTransferInstructions(response) => { - let bank_instructions = response.financial_addresses.get(0); + let bank_instructions = response.financial_addresses.first(); let (sepa_bank_instructions, bacs_bank_instructions) = bank_instructions.map_or((None, None), |financial_address| { ( diff --git a/crates/router/src/connector/utils.rs b/crates/router/src/connector/utils.rs index 24def6253726..0dca3ace9479 100644 --- a/crates/router/src/connector/utils.rs +++ b/crates/router/src/connector/utils.rs @@ -1512,86 +1512,6 @@ pub fn get_error_code_error_message_based_on_priority( .cloned() } -#[cfg(test)] -mod error_code_error_message_tests { - #![allow(clippy::unwrap_used)] - use super::*; - - struct TestConnector; - - impl ConnectorErrorTypeMapping for TestConnector { - fn get_connector_error_type( - &self, - error_code: String, - error_message: String, - ) -> ConnectorErrorType { - match (error_code.as_str(), error_message.as_str()) { - ("01", "INVALID_MERCHANT") => ConnectorErrorType::BusinessError, - ("03", "INVALID_CVV") => ConnectorErrorType::UserError, - ("04", "04") => ConnectorErrorType::TechnicalError, - _ => ConnectorErrorType::UnknownError, - } - } - } - - #[test] - fn test_get_error_code_error_message_based_on_priority() { - let error_code_message_list_unknown = vec![ - ErrorCodeAndMessage { - error_code: "01".to_string(), - error_message: "INVALID_MERCHANT".to_string(), - }, - ErrorCodeAndMessage { - error_code: "05".to_string(), - error_message: "05".to_string(), - }, - ErrorCodeAndMessage { - error_code: "03".to_string(), - error_message: "INVALID_CVV".to_string(), - }, - ErrorCodeAndMessage { - error_code: "04".to_string(), - error_message: "04".to_string(), - }, - ]; - let error_code_message_list_user = vec![ - ErrorCodeAndMessage { - error_code: "01".to_string(), - error_message: "INVALID_MERCHANT".to_string(), - }, - ErrorCodeAndMessage { - error_code: "03".to_string(), - error_message: "INVALID_CVV".to_string(), - }, - ]; - let error_code_error_message_unknown = get_error_code_error_message_based_on_priority( - TestConnector, - error_code_message_list_unknown, - ); - let error_code_error_message_user = get_error_code_error_message_based_on_priority( - TestConnector, - error_code_message_list_user, - ); - let error_code_error_message_none = - get_error_code_error_message_based_on_priority(TestConnector, vec![]); - assert_eq!( - error_code_error_message_unknown, - Some(ErrorCodeAndMessage { - error_code: "05".to_string(), - error_message: "05".to_string(), - }) - ); - assert_eq!( - error_code_error_message_user, - Some(ErrorCodeAndMessage { - error_code: "03".to_string(), - error_message: "INVALID_CVV".to_string(), - }) - ); - assert_eq!(error_code_error_message_none, None); - } -} - pub trait MultipleCaptureSyncResponse { fn get_connector_capture_id(&self) -> String; fn get_capture_attempt_status(&self) -> enums::AttemptStatus; @@ -1785,3 +1705,83 @@ impl FrmTransactionRouterDataRequest for fraud_check::FrmTransactionRouterData { } } } + +#[cfg(test)] +mod error_code_error_message_tests { + #![allow(clippy::unwrap_used)] + use super::*; + + struct TestConnector; + + impl ConnectorErrorTypeMapping for TestConnector { + fn get_connector_error_type( + &self, + error_code: String, + error_message: String, + ) -> ConnectorErrorType { + match (error_code.as_str(), error_message.as_str()) { + ("01", "INVALID_MERCHANT") => ConnectorErrorType::BusinessError, + ("03", "INVALID_CVV") 
=> ConnectorErrorType::UserError, + ("04", "04") => ConnectorErrorType::TechnicalError, + _ => ConnectorErrorType::UnknownError, + } + } + } + + #[test] + fn test_get_error_code_error_message_based_on_priority() { + let error_code_message_list_unknown = vec![ + ErrorCodeAndMessage { + error_code: "01".to_string(), + error_message: "INVALID_MERCHANT".to_string(), + }, + ErrorCodeAndMessage { + error_code: "05".to_string(), + error_message: "05".to_string(), + }, + ErrorCodeAndMessage { + error_code: "03".to_string(), + error_message: "INVALID_CVV".to_string(), + }, + ErrorCodeAndMessage { + error_code: "04".to_string(), + error_message: "04".to_string(), + }, + ]; + let error_code_message_list_user = vec![ + ErrorCodeAndMessage { + error_code: "01".to_string(), + error_message: "INVALID_MERCHANT".to_string(), + }, + ErrorCodeAndMessage { + error_code: "03".to_string(), + error_message: "INVALID_CVV".to_string(), + }, + ]; + let error_code_error_message_unknown = get_error_code_error_message_based_on_priority( + TestConnector, + error_code_message_list_unknown, + ); + let error_code_error_message_user = get_error_code_error_message_based_on_priority( + TestConnector, + error_code_message_list_user, + ); + let error_code_error_message_none = + get_error_code_error_message_based_on_priority(TestConnector, vec![]); + assert_eq!( + error_code_error_message_unknown, + Some(ErrorCodeAndMessage { + error_code: "05".to_string(), + error_message: "05".to_string(), + }) + ); + assert_eq!( + error_code_error_message_user, + Some(ErrorCodeAndMessage { + error_code: "03".to_string(), + error_message: "INVALID_CVV".to_string(), + }) + ); + assert_eq!(error_code_error_message_none, None); + } +} diff --git a/crates/router/src/connector/wise.rs b/crates/router/src/connector/wise.rs index 0674cc2ff8bc..d2ec08c607c4 100644 --- a/crates/router/src/connector/wise.rs +++ b/crates/router/src/connector/wise.rs @@ -90,7 +90,7 @@ impl ConnectorCommon for Wise { let default_status = response.status.unwrap_or_default().to_string(); match response.errors { Some(errs) => { - if let Some(e) = errs.get(0) { + if let Some(e) = errs.first() { Ok(types::ErrorResponse { status_code: res.status_code, code: e.code.clone(), @@ -301,7 +301,7 @@ impl services::ConnectorIntegration = list.into_iter().map(ForeignFrom::foreign_from).collect(); diff --git a/crates/router/src/core/payments/transformers.rs b/crates/router/src/core/payments/transformers.rs index 577480d311d7..ff8b95d4ab44 100644 --- a/crates/router/src/core/payments/transformers.rs +++ b/crates/router/src/core/payments/transformers.rs @@ -547,7 +547,7 @@ where if third_party_sdk_session_next_action(&payment_attempt, operation) { next_action_response = Some( api_models::payments::NextActionData::ThirdPartySdkSessionToken { - session_token: payment_data.sessions_token.get(0).cloned(), + session_token: payment_data.sessions_token.first().cloned(), }, ) } diff --git a/crates/router/src/db/dispute.rs b/crates/router/src/db/dispute.rs index c63585205bb3..4529b121fa24 100644 --- a/crates/router/src/db/dispute.rs +++ b/crates/router/src/db/dispute.rs @@ -572,7 +572,7 @@ mod tests { assert_eq!(1, found_disputes.len()); - assert_eq!(created_dispute, found_disputes.get(0).unwrap().clone()); + assert_eq!(created_dispute, found_disputes.first().unwrap().clone()); } #[tokio::test] @@ -611,7 +611,7 @@ mod tests { assert_eq!(1, found_disputes.len()); - assert_eq!(created_dispute, found_disputes.get(0).unwrap().clone()); + assert_eq!(created_dispute, 
found_disputes.first().unwrap().clone());
     }
 
     mod update_dispute {
diff --git a/crates/router/src/macros.rs b/crates/router/src/macros.rs
index e6c9dba7d6e2..efe71e49bb04 100644
--- a/crates/router/src/macros.rs
+++ b/crates/router/src/macros.rs
@@ -1,4 +1 @@
-pub use common_utils::{
-    async_spawn, collect_missing_value_keys, fallback_reverse_lookup_not_found, newtype,
-    newtype_impl,
-};
+pub use common_utils::{collect_missing_value_keys, newtype};
diff --git a/crates/router/src/services/api.rs b/crates/router/src/services/api.rs
index 71687e02c121..9ac8d5e5eb41 100644
--- a/crates/router/src/services/api.rs
+++ b/crates/router/src/services/api.rs
@@ -1539,16 +1539,16 @@ pub fn build_redirection_form(
                     var responseForm = document.createElement('form');
                     responseForm.action=window.location.pathname.replace(/payments\\/redirect\\/(\\w+)\\/(\\w+)\\/\\w+/, \"payments/$1/$2/redirect/complete/nmi\");
                     responseForm.method='POST';
-                    
+
                     const threeDSsecureInterface = threeDS.createUI(options);
                     threeDSsecureInterface.start('body');
-                    
+
                     threeDSsecureInterface.on('challenge', function(e) {{
                         console.log('Challenged');
                     }});
-                    
+
                     threeDSsecureInterface.on('complete', function(e) {{
-                    
+
                         var item1=document.createElement('input');
                         item1.type='hidden';
                         item1.name='cavv';
@@ -1582,11 +1582,11 @@
                         document.body.appendChild(responseForm);
                         responseForm.submit();
                     }});
-                    
+
                     threeDSsecureInterface.on('failure', function(e) {{
                         responseForm.submit();
                     }});
-                    
+
                     "
             )))
         }
@@ -1594,14 +1594,6 @@ pub fn build_redirection_form(
     }
 }
 
-#[cfg(test)]
-mod tests {
-    #[test]
-    fn test_mime_essence() {
-        assert_eq!(mime::APPLICATION_JSON.essence_str(), "application/json");
-    }
-}
-
 pub fn build_payment_link_html(
     payment_link_data: PaymentLinkFormData,
 ) -> CustomResult<String, errors::ApiErrorResponse> {
@@ -1631,3 +1623,11 @@ pub fn build_payment_link_html(
 fn get_hyper_loader_sdk(sdk_url: &str) -> String {
     format!("<script src=\"{sdk_url}\"></script>")
 }
+
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn test_mime_essence() {
+        assert_eq!(mime::APPLICATION_JSON.essence_str(), "application/json");
+    }
+}
diff --git a/crates/router/src/types/api/connector_onboarding/paypal.rs b/crates/router/src/types/api/connector_onboarding/paypal.rs
index 0cc026d4d7ad..dbfdd6f50075 100644
--- a/crates/router/src/types/api/connector_onboarding/paypal.rs
+++ b/crates/router/src/types/api/connector_onboarding/paypal.rs
@@ -177,7 +177,7 @@ pub enum VettingStatus {
 impl SellerStatusResponse {
     pub fn extract_merchant_details_url(self, paypal_base_url: &str) -> RouterResult<String> {
         self.links
-            .get(0)
+            .first()
             .and_then(|link| link.href.strip_prefix('/'))
             .map(|link| format!("{}{}", paypal_base_url, link))
             .ok_or(ApiErrorResponse::InternalServerError)
diff --git a/crates/router/src/types/storage.rs b/crates/router/src/types/storage.rs
index 1dc241cde20c..56d3272b9471 100644
--- a/crates/router/src/types/storage.rs
+++ b/crates/router/src/types/storage.rs
@@ -26,7 +26,6 @@ pub mod payment_link;
 pub mod payment_method;
 pub mod payout_attempt;
 pub mod payouts;
-mod query;
 pub mod refund;
 pub mod reverse_lookup;
 pub mod routing_algorithm;
diff --git a/crates/router/src/types/storage/query.rs b/crates/router/src/types/storage/query.rs
deleted file mode 100644
index 1483dec3eab3..000000000000
--- a/crates/router/src/types/storage/query.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub use diesel_models::query::*;
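The .get(0) -> .first() substitutions running through this patch address Rust 1.75's clippy::get_first lint; the two forms are equivalent, .first() just states the intent. A minimal illustration (not from the patch):

fn main() {
    let xs = vec![10, 20, 30];
    // Identical results; clippy 1.75 flags the left-hand form.
    assert_eq!(xs.get(0), xs.first());
    assert_eq!(xs.first().copied(), Some(10));
}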
diff --git a/crates/router/src/utils.rs b/crates/router/src/utils.rs
index 42116e1ecbf0..aaa145099e4e 100644
--- a/crates/router/src/utils.rs
+++ b/crates/router/src/utils.rs
@@ -196,16 +196,6 @@ impl QrImage {
     }
 }
 
-#[cfg(test)]
-mod tests {
-    use crate::utils;
-    #[test]
-    fn test_image_data_source_url() {
-        let qr_image_data_source_url = utils::QrImage::new_from_data("Hyperswitch".to_string());
-        assert!(qr_image_data_source_url.is_ok());
-    }
-}
-
 pub async fn find_payment_intent_from_payment_id_type(
     db: &dyn StorageInterface,
     payment_id_type: payments::PaymentIdType,
@@ -804,3 +794,13 @@ pub async fn flatten_join_error<T>(handle: Handle<RouterResult<T>>) -> RouterResult<T> {
             .attach_printable("Join Error"),
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::utils;
+    #[test]
+    fn test_image_data_source_url() {
+        let qr_image_data_source_url = utils::QrImage::new_from_data("Hyperswitch".to_string());
+        assert!(qr_image_data_source_url.is_ok());
+    }
+}
diff --git a/crates/router/src/utils/user/sample_data.rs b/crates/router/src/utils/user/sample_data.rs
index 9f95e2d078dd..5ce0818b82b3 100644
--- a/crates/router/src/utils/user/sample_data.rs
+++ b/crates/router/src/utils/user/sample_data.rs
@@ -48,9 +48,9 @@ pub async fn generate_sample_data(
         .change_context(SampleDataError::InternalServerError)
         .attach_printable("Error while parsing primary business details")?;
 
-    let business_country_default = merchant_parsed_details.get(0).map(|x| x.country);
+    let business_country_default = merchant_parsed_details.first().map(|x| x.country);
 
-    let business_label_default = merchant_parsed_details.get(0).map(|x| x.business.clone());
+    let business_label_default = merchant_parsed_details.first().map(|x| x.business.clone());
 
     let profile_id = crate::core::utils::get_profile_id_from_business_details(
         business_country_default,
diff --git a/crates/router/tests/connectors/utils.rs b/crates/router/tests/connectors/utils.rs
index 62fce84f1f9d..c27a648a3051 100644
--- a/crates/router/tests/connectors/utils.rs
+++ b/crates/router/tests/connectors/utils.rs
@@ -458,7 +458,7 @@ pub trait ConnectorActions: Connector {
             customer_details: Some(payments::CustomerDetails {
                 customer_id: core_utils::get_or_generate_id("customer_id", &None, "cust_").ok(),
                 name: Some(Secret::new("John Doe".to_string())),
-                email: TryFrom::try_from(Secret::new("john.doe@example".to_string())).ok(),
+                email: TryFrom::try_from("john.doe@example".to_string()).ok(),
                 phone: Some(Secret::new("620874518".to_string())),
                 phone_country_code: Some("+31".to_string()),
             }),
diff --git a/crates/scheduler/src/consumer.rs b/crates/scheduler/src/consumer.rs
index ef0386bec299..ccc943afba3c 100644
--- a/crates/scheduler/src/consumer.rs
+++ b/crates/scheduler/src/consumer.rs
@@ -40,9 +40,15 @@ pub async fn start_consumer(
     use rand::distributions::{Distribution, Uniform};
 
     let mut rng = rand::thread_rng();
+
+    // TODO: this can be removed once rand-0.9 is released
+    // reference - https://github.com/rust-random/rand/issues/1326#issuecomment-1635331942
+    #[allow(unknown_lints)]
+    #[allow(clippy::unnecessary_fallible_conversions)]
     let timeout = Uniform::try_from(0..=settings.loop_interval)
         .into_report()
         .change_context(errors::ProcessTrackerError::ConfigurationError)?;
+
     tokio::time::sleep(Duration::from_millis(timeout.sample(&mut rng))).await;
 
     let mut interval = tokio::time::interval(Duration::from_millis(settings.loop_interval));
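The allow attributes are needed because, on rand 0.8, Uniform::try_from resolves through the blanket TryFrom impl derived from From — which clippy 1.75 flags as an unnecessary fallible conversion — while the unreleased rand 0.9 is expected to make range construction genuinely fallible. The startup-jitter pattern itself, in plain rand 0.8 form (the interval value below is an assumed config value, not from the patch):

use rand::distributions::{Distribution, Uniform};

fn main() {
    let loop_interval_ms: u64 = 5_000; // assumed config value
    let mut rng = rand::thread_rng();
    // Sleep a random 0..=loop_interval before the first tick so that
    // concurrently started consumers/producers don't fire in lockstep.
    let jitter_ms = Uniform::from(0..=loop_interval_ms).sample(&mut rng);
    println!("sleeping {jitter_ms} ms before the first tick");
}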
diff --git a/crates/scheduler/src/producer.rs b/crates/scheduler/src/producer.rs
index 52510e1842e0..bcf37cdf6f22 100644
--- a/crates/scheduler/src/producer.rs
+++ b/crates/scheduler/src/producer.rs
@@ -30,9 +30,15 @@ where
     use rand::distributions::{Distribution, Uniform};
 
     let mut rng = rand::thread_rng();
+
+    // TODO: this can be removed once rand-0.9 is released
+    // reference - https://github.com/rust-random/rand/issues/1326#issuecomment-1635331942
+    #[allow(unknown_lints)]
+    #[allow(clippy::unnecessary_fallible_conversions)]
     let timeout = Uniform::try_from(0..=scheduler_settings.loop_interval)
         .into_report()
         .change_context(errors::ProcessTrackerError::ConfigurationError)?;
+
    tokio::time::sleep(Duration::from_millis(timeout.sample(&mut rng))).await;
 
     let mut interval = tokio::time::interval(std::time::Duration::from_millis(