Merge branch 'main' into feat/block_paymentmethod

prajjwalkumar17 authored Jan 5, 2024
2 parents f39aebd + c8279b1 commit 4c2fe79
Showing 43 changed files with 713 additions and 156 deletions.
13 changes: 7 additions & 6 deletions config/config.example.toml
@@ -524,9 +524,10 @@ enabled = true # Switch to enable or disable PayPal onboarding
source = "logs" # The event sink to push events supports kafka or logs (stdout)

[events.kafka]
-brokers = [] # Kafka broker urls for bootstrapping the client
-intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
-attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
-refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
-api_logs_topic = "topic" # Kafka topic to be used for incoming api events
-connector_logs_topic = "topic" # Kafka topic to be used for connector api events
+brokers = [] # Kafka broker urls for bootstrapping the client
+intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
+attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
+refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
+api_logs_topic = "topic" # Kafka topic to be used for incoming api events
+connector_logs_topic = "topic" # Kafka topic to be used for connector api events
+outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
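
For context, the [events.kafka] section is typically deserialized into a typed settings struct on the Rust side. A minimal sketch, assuming serde is used for config deserialization (the struct name and field types here are assumptions, not hyperswitch's actual definitions):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
pub struct KafkaSettings {
    pub brokers: Vec<String>,                // Kafka broker urls for bootstrapping
    pub intent_analytics_topic: String,
    pub attempt_analytics_topic: String,
    pub refund_analytics_topic: String,
    pub api_logs_topic: String,
    pub connector_logs_topic: String,
    pub outgoing_webhook_logs_topic: String, // the topic introduced in this commit
}
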
1 change: 1 addition & 0 deletions config/development.toml
@@ -519,6 +519,7 @@ attempt_analytics_topic = "hyperswitch-payment-attempt-events"
refund_analytics_topic = "hyperswitch-refund-events"
api_logs_topic = "hyperswitch-api-log-events"
connector_logs_topic = "hyperswitch-connector-api-events"
+outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events"

[analytics]
source = "sqlx"
1 change: 1 addition & 0 deletions config/docker_compose.toml
@@ -366,6 +366,7 @@ attempt_analytics_topic = "hyperswitch-payment-attempt-events"
refund_analytics_topic = "hyperswitch-refund-events"
api_logs_topic = "hyperswitch-api-log-events"
connector_logs_topic = "hyperswitch-connector-api-events"
+outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events"

[analytics]
source = "sqlx"
109 changes: 109 additions & 0 deletions crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql
@@ -0,0 +1,109 @@
CREATE TABLE
outgoing_webhook_events_queue (
`merchant_id` String,
`event_id` Nullable(String),
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`created_at_timestamp` DateTime64(3)
) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
kafka_topic_list = 'hyperswitch-outgoing-webhook-events',
kafka_group_name = 'hyper-c1',
kafka_format = 'JSONEachRow',
kafka_handle_error_mode = 'stream';

CREATE TABLE
outgoing_webhook_events_cluster (
`merchant_id` String,
`event_id` String,
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`created_at_timestamp` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
INDEX eventIndex event_type TYPE bloom_filter GRANULARITY 1,
INDEX webhookeventIndex outgoing_webhook_event_type TYPE bloom_filter GRANULARITY 1
) ENGINE = MergeTree PARTITION BY toStartOfDay(created_at_timestamp)
ORDER BY (
created_at_timestamp,
merchant_id,
event_id,
event_type,
outgoing_webhook_event_type
) TTL inserted_at + toIntervalMonth(6);

CREATE MATERIALIZED VIEW outgoing_webhook_events_mv TO outgoing_webhook_events_cluster (
`merchant_id` String,
`event_id` Nullable(String),
`event_type` LowCardinality(String),
`outgoing_webhook_event_type` LowCardinality(String),
`payment_id` Nullable(String),
`refund_id` Nullable(String),
`attempt_id` Nullable(String),
`dispute_id` Nullable(String),
`payment_method_id` Nullable(String),
`mandate_id` Nullable(String),
`content` Nullable(String),
`is_error` Bool,
`error` Nullable(String),
`created_at_timestamp` DateTime64(3),
`inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4)
) AS
SELECT
merchant_id,
event_id,
event_type,
outgoing_webhook_event_type,
payment_id,
refund_id,
attempt_id,
dispute_id,
payment_method_id,
mandate_id,
content,
is_error,
error,
created_at_timestamp,
now() AS inserted_at
FROM
outgoing_webhook_events_queue
WHERE length(_error) = 0;

CREATE MATERIALIZED VIEW outgoing_webhook_parse_errors (
`topic` String,
`partition` Int64,
`offset` Int64,
`raw` String,
`error` String
) ENGINE = MergeTree
ORDER BY (
topic, partition,
offset
) SETTINGS index_granularity = 8192 AS
SELECT
_topic AS topic,
_partition AS partition,
_offset AS offset,
_raw_message AS raw,
_error AS error
FROM
outgoing_webhook_events_queue
WHERE length(_error) > 0;
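
The Kafka engine table above fixes the JSONEachRow payload shape that producers must emit; the materialized view copies well-formed rows into the MergeTree table (with a six-month TTL), while rows that fail to parse are diverted to outgoing_webhook_parse_errors via kafka_handle_error_mode = 'stream' and the length(_error) > 0 filter. As a rough producer-side illustration, a Rust struct mirroring the queue schema might look like this (field names come from the DDL; the struct name and the use of epoch milliseconds for DateTime64(3) are assumptions):

use serde::Serialize;

#[derive(Debug, Serialize)]
pub struct OutgoingWebhookEvent {
    pub merchant_id: String,
    pub event_id: Option<String>,
    pub event_type: String,                  // LowCardinality(String) in ClickHouse
    pub outgoing_webhook_event_type: String,
    pub payment_id: Option<String>,
    pub refund_id: Option<String>,
    pub attempt_id: Option<String>,
    pub dispute_id: Option<String>,
    pub payment_method_id: Option<String>,
    pub mandate_id: Option<String>,
    pub content: Option<String>,
    pub is_error: bool,
    pub error: Option<String>,
    pub created_at_timestamp: i64,           // epoch millis, read as DateTime64(3)
}
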
6 changes: 6 additions & 0 deletions crates/api_models/src/health_check.rs
@@ -0,0 +1,6 @@
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct RouterHealthCheckResponse {
pub database: String,
pub redis: String,
pub locker: String,
}
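
A minimal usage sketch for this type, assuming the struct is in scope and serde_json is available (the "UP" status strings are illustrative, not values mandated by the API):

fn main() {
    let health = RouterHealthCheckResponse {
        database: "UP".to_string(),
        redis: "UP".to_string(),
        locker: "UP".to_string(),
    };
    // the serde derives above make the response serializable straight to JSON
    println!("{}", serde_json::to_string(&health).unwrap());
}
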
1 change: 1 addition & 0 deletions crates/api_models/src/lib.rs
@@ -16,6 +16,7 @@ pub mod errors;
pub mod events;
pub mod files;
pub mod gsm;
+pub mod health_check;
pub mod locker_migration;
pub mod mandates;
pub mod organization;
3 changes: 1 addition & 2 deletions crates/common_utils/src/request.rs
@@ -138,8 +138,7 @@ impl RequestBuilder {
}

pub fn headers(mut self, headers: Vec<(String, Maskable<String>)>) -> Self {
-let mut h = headers.into_iter().map(|(h, v)| (h, v));
-self.headers.extend(&mut h);
+self.headers.extend(headers);
self
}

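The removed lines built an identity iterator (the map closure returns its input unchanged) and extended from a &mut iterator; since Vec implements IntoIterator, extend can consume the vector directly. A standalone sketch of the same simplification, using plain String values instead of Maskable<String>:

fn main() {
    let mut headers: Vec<(String, String)> = Vec::new();
    let incoming = vec![("x-request-id".to_string(), "abc123".to_string())];
    // extend accepts any IntoIterator, so no intermediate map is needed
    headers.extend(incoming);
    assert_eq!(headers.len(), 1);
}
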
2 changes: 1 addition & 1 deletion crates/kgraph_utils/src/mca.rs
@@ -156,7 +156,7 @@ fn compile_request_pm_types(

let or_node_neighbor_id = if amount_nodes.len() == 1 {
amount_nodes
-.get(0)
+.first()
.copied()
.ok_or(KgraphError::IndexingError)?
} else {
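This hunk, like the error-response and stripe hunks below, swaps slice indexing via .get(0) for .first(), the equivalent accessor that clippy's get_first lint recommends. A tiny sketch of the equivalence:

fn main() {
    let amount_nodes = vec![42];
    // .first() returns Option<&T>, exactly like .get(0), but states intent
    assert_eq!(amount_nodes.first(), amount_nodes.get(0));
    assert_eq!(amount_nodes.first().copied(), Some(42));
}
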
1 change: 0 additions & 1 deletion crates/masking/src/diesel.rs
@@ -2,7 +2,6 @@
//! Diesel-related.
//!
-pub use diesel::Expression;
use diesel::{
backend::Backend,
deserialize::{self, FromSql, Queryable},
2 changes: 1 addition & 1 deletion crates/masking/src/serde.rs
@@ -3,7 +3,7 @@
//!
pub use erased_serde::Serialize as ErasedSerialize;
-pub use serde::{de, ser, Deserialize, Serialize, Serializer};
+pub use serde::{de, Deserialize, Serialize, Serializer};
use serde_json::{value::Serializer as JsonValueSerializer, Value};

use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret};
2 changes: 1 addition & 1 deletion crates/redis_interface/src/lib.rs
@@ -28,7 +28,7 @@ pub use fred::interfaces::PubsubInterface;
use fred::{interfaces::ClientLike, prelude::EventInterface};
use router_env::logger;

-pub use self::{commands::*, types::*};
+pub use self::types::*;

pub struct RedisConnectionPool {
pub pool: fred::prelude::RedisPool,
2 changes: 1 addition & 1 deletion crates/router/src/configs/settings.rs
@@ -556,7 +556,7 @@ impl From<Database> for storage_impl::config::Database {
dbname: val.dbname,
pool_size: val.pool_size,
connection_timeout: val.connection_timeout,
-queue_strategy: val.queue_strategy.into(),
+queue_strategy: val.queue_strategy,
min_idle: val.min_idle,
max_lifetime: val.max_lifetime,
}
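Dropping the .into() suggests both sides of the conversion now share the same queue-strategy type, making the call a no-op of the kind clippy flags as useless_conversion. An illustrative sketch with a hypothetical stand-in type:

#[derive(Debug, Clone, Copy, PartialEq)]
enum QueueStrategy { Fifo, Lifo } // hypothetical stand-in, not the real type

fn main() {
    let val = QueueStrategy::Fifo;
    // when source and target types are identical, `.into()` changes nothing;
    // assigning directly is clearer
    let queue_strategy: QueueStrategy = val;
    assert_eq!(queue_strategy, QueueStrategy::Fifo);
    let _ = QueueStrategy::Lifo; // keep the second variant "used" in this sketch
}
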
@@ -297,11 +297,11 @@ fn build_error_response<T>(

get_error_response(
response
-.get(0)
+.first()
.and_then(|err_details| err_details.extensions.as_ref())
.and_then(|extensions| extensions.legacy_code.clone()),
response
-.get(0)
+.first()
.map(|err_details| err_details.message.clone()),
reason,
http_code,
5 changes: 1 addition & 4 deletions crates/router/src/connector/checkout/transformers.rs
@@ -1026,10 +1026,7 @@ impl utils::MultipleCaptureSyncResponse for Box<PaymentsResponse> {
self.status == CheckoutPaymentStatus::Captured
}
fn get_amount_captured(&self) -> Option<i64> {
-match self.amount {
-    Some(amount) => amount.try_into().ok(),
-    None => None,
-}
+self.amount.map(Into::into)
}
}

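Here a match over Option collapses to Option::map, the idiomatic form clippy's manual_map lint suggests; the rewrite also trades the fallible try_into().ok() for an infallible Into, which only compiles when the amount's type converts losslessly into i64. A standalone sketch under that assumption (u32 is a stand-in for the connector's amount type):

fn get_amount_captured(amount: Option<u32>) -> Option<i64> {
    // u32 -> i64 is lossless, so Into replaces try_into().ok()
    amount.map(Into::into)
}

fn main() {
    assert_eq!(get_amount_captured(Some(100)), Some(100_i64));
    assert_eq!(get_amount_captured(None), None);
}
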
2 changes: 1 addition & 1 deletion crates/router/src/connector/stripe/transformers.rs
@@ -2336,7 +2336,7 @@ pub fn get_connector_metadata(
let next_action_response = next_action
.and_then(|next_action_response| match next_action_response {
StripeNextActionResponse::DisplayBankTransferInstructions(response) => {
-let bank_instructions = response.financial_addresses.get(0);
+let bank_instructions = response.financial_addresses.first();
let (sepa_bank_instructions, bacs_bank_instructions) =
bank_instructions.map_or((None, None), |financial_address| {
(