From 20c4226a36e4650a3ba8811b758ac5f7969bcfb3 Mon Sep 17 00:00:00 2001 From: Apoorv Dixit <64925866+apoorvdixit88@users.noreply.github.com> Date: Fri, 10 Nov 2023 11:47:32 +0530 Subject: [PATCH 1/5] feat(user): setup user tables (#2803) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Sahkal Poddar Co-authored-by: Sahkal Poddar Co-authored-by: Sai Harsha Vardhan <56996463+sai-harsha-vardhan@users.noreply.github.com> Co-authored-by: Venkatesh Co-authored-by: venkatesh.devendran Co-authored-by: Abhishek Marrivagu <68317979+Abhicodes-crypto@users.noreply.github.com> --- crates/diesel_models/src/enums.rs | 22 ++ crates/diesel_models/src/lib.rs | 2 + crates/diesel_models/src/query.rs | 2 + crates/diesel_models/src/query/user.rs | 62 ++++ crates/diesel_models/src/query/user_role.rs | 58 ++++ crates/diesel_models/src/schema.rs | 47 ++++ crates/diesel_models/src/user.rs | 76 +++++ crates/diesel_models/src/user_role.rs | 79 ++++++ crates/router/src/db.rs | 4 + crates/router/src/db/user.rs | 265 ++++++++++++++++++ crates/router/src/db/user_role.rs | 255 +++++++++++++++++ crates/router/src/types/storage.rs | 4 +- crates/router/src/types/storage/user.rs | 1 + crates/router/src/types/storage/user_role.rs | 1 + crates/storage_impl/src/mock_db.rs | 4 + .../down.sql | 2 + .../up.sql | 14 + .../down.sql | 4 + .../up.sql | 18 ++ 19 files changed, 919 insertions(+), 1 deletion(-) create mode 100644 crates/diesel_models/src/query/user.rs create mode 100644 crates/diesel_models/src/query/user_role.rs create mode 100644 crates/diesel_models/src/user.rs create mode 100644 crates/diesel_models/src/user_role.rs create mode 100644 crates/router/src/db/user.rs create mode 100644 crates/router/src/db/user_role.rs create mode 100644 crates/router/src/types/storage/user.rs create mode 100644 crates/router/src/types/storage/user_role.rs create mode 100644 migrations/2023-11-06-110233_create_user_table/down.sql create mode 100644 migrations/2023-11-06-110233_create_user_table/up.sql create mode 100644 migrations/2023-11-06-113726_create_user_roles_table/down.sql create mode 100644 migrations/2023-11-06-113726_create_user_roles_table/up.sql diff --git a/crates/diesel_models/src/enums.rs b/crates/diesel_models/src/enums.rs index 0e06a324f038..ec021f0f51a5 100644 --- a/crates/diesel_models/src/enums.rs +++ b/crates/diesel_models/src/enums.rs @@ -401,3 +401,25 @@ pub enum FraudCheckLastStep { TransactionOrRecordRefund, Fulfillment, } + +#[derive( + Clone, + Copy, + Debug, + Default, + Eq, + PartialEq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumString, + frunk::LabelledGeneric, +)] +#[router_derive::diesel_enum(storage_type = "text")] +#[serde(rename_all = "snake_case")] +#[strum(serialize_all = "snake_case")] +pub enum UserStatus { + Active, + #[default] + InvitationSent, +} diff --git a/crates/diesel_models/src/lib.rs b/crates/diesel_models/src/lib.rs index 46a6965b3a7b..781099662a50 100644 --- a/crates/diesel_models/src/lib.rs +++ b/crates/diesel_models/src/lib.rs @@ -38,6 +38,8 @@ pub mod reverse_lookup; pub mod routing_algorithm; #[allow(unused_qualifications)] pub mod schema; +pub mod user; +pub mod user_role; use diesel_impl::{DieselArray, OptionalDieselArray}; diff --git a/crates/diesel_models/src/query.rs b/crates/diesel_models/src/query.rs index f315327702ad..cf5a993c2686 100644 --- a/crates/diesel_models/src/query.rs +++ b/crates/diesel_models/src/query.rs @@ -28,3 +28,5 @@ pub mod process_tracker; pub mod refund; pub mod 
reverse_lookup; pub mod routing_algorithm; +pub mod user; +pub mod user_role; diff --git a/crates/diesel_models/src/query/user.rs b/crates/diesel_models/src/query/user.rs new file mode 100644 index 000000000000..5761d8af814d --- /dev/null +++ b/crates/diesel_models/src/query/user.rs @@ -0,0 +1,62 @@ +use diesel::{associations::HasTable, ExpressionMethods}; +use error_stack::report; +use router_env::tracing::{self, instrument}; + +use crate::{ + errors::{self}, + query::generics, + schema::users::dsl, + user::*, + PgPooledConn, StorageResult, +}; + +impl UserNew { + #[instrument(skip(conn))] + pub async fn insert(self, conn: &PgPooledConn) -> StorageResult { + generics::generic_insert(conn, self).await + } +} + +impl User { + pub async fn find_by_user_email(conn: &PgPooledConn, user_email: &str) -> StorageResult { + generics::generic_find_one::<::Table, _, _>( + conn, + dsl::email.eq(user_email.to_owned()), + ) + .await + } + + pub async fn find_by_user_id(conn: &PgPooledConn, user_id: &str) -> StorageResult { + generics::generic_find_one::<::Table, _, _>( + conn, + dsl::user_id.eq(user_id.to_owned()), + ) + .await + } + + pub async fn update_by_user_id( + conn: &PgPooledConn, + user_id: &str, + user: UserUpdate, + ) -> StorageResult { + generics::generic_update_with_results::<::Table, _, _, _>( + conn, + dsl::user_id.eq(user_id.to_owned()), + UserUpdateInternal::from(user), + ) + .await? + .first() + .cloned() + .ok_or_else(|| { + report!(errors::DatabaseError::NotFound).attach_printable("Error while updating user") + }) + } + + pub async fn delete_by_user_id(conn: &PgPooledConn, user_id: &str) -> StorageResult { + generics::generic_delete::<::Table, _>( + conn, + dsl::user_id.eq(user_id.to_owned()), + ) + .await + } +} diff --git a/crates/diesel_models/src/query/user_role.rs b/crates/diesel_models/src/query/user_role.rs new file mode 100644 index 000000000000..d2f9564a5309 --- /dev/null +++ b/crates/diesel_models/src/query/user_role.rs @@ -0,0 +1,58 @@ +use diesel::{associations::HasTable, BoolExpressionMethods, ExpressionMethods}; +use router_env::tracing::{self, instrument}; + +use crate::{query::generics, schema::user_roles::dsl, user_role::*, PgPooledConn, StorageResult}; + +impl UserRoleNew { + #[instrument(skip(conn))] + pub async fn insert(self, conn: &PgPooledConn) -> StorageResult { + generics::generic_insert(conn, self).await + } +} + +impl UserRole { + pub async fn find_by_user_id(conn: &PgPooledConn, user_id: String) -> StorageResult { + generics::generic_find_one::<::Table, _, _>( + conn, + dsl::user_id.eq(user_id), + ) + .await + } + + pub async fn update_by_user_id_merchant_id( + conn: &PgPooledConn, + user_id: String, + merchant_id: String, + update: UserRoleUpdate, + ) -> StorageResult { + generics::generic_update_with_unique_predicate_get_result::< + ::Table, + _, + _, + _, + >( + conn, + dsl::user_id + .eq(user_id) + .and(dsl::merchant_id.eq(merchant_id)), + UserRoleUpdateInternal::from(update), + ) + .await + } + + pub async fn delete_by_user_id(conn: &PgPooledConn, user_id: String) -> StorageResult { + generics::generic_delete::<::Table, _>(conn, dsl::user_id.eq(user_id)) + .await + } + + pub async fn list_by_user_id(conn: &PgPooledConn, user_id: String) -> StorageResult> { + generics::generic_filter::<::Table, _, _, _>( + conn, + dsl::user_id.eq(user_id), + None, + None, + Some(dsl::created_at.asc()), + ) + .await + } +} diff --git a/crates/diesel_models/src/schema.rs b/crates/diesel_models/src/schema.rs index 6c9cea035b3f..72d5217038c1 100644 --- 
a/crates/diesel_models/src/schema.rs +++ b/crates/diesel_models/src/schema.rs @@ -900,6 +900,51 @@ diesel::table! { } } +diesel::table! { + use diesel::sql_types::*; + use crate::enums::diesel_exports::*; + + user_roles (id) { + id -> Int4, + #[max_length = 64] + user_id -> Varchar, + #[max_length = 64] + merchant_id -> Varchar, + #[max_length = 64] + role_id -> Varchar, + #[max_length = 64] + org_id -> Varchar, + #[max_length = 64] + status -> Varchar, + #[max_length = 64] + created_by -> Varchar, + #[max_length = 64] + last_modified_by -> Varchar, + created_at -> Timestamp, + last_modified_at -> Timestamp, + } +} + +diesel::table! { + use diesel::sql_types::*; + use crate::enums::diesel_exports::*; + + users (id) { + id -> Int4, + #[max_length = 64] + user_id -> Varchar, + #[max_length = 255] + email -> Varchar, + #[max_length = 255] + name -> Varchar, + #[max_length = 255] + password -> Varchar, + is_verified -> Bool, + created_at -> Timestamp, + last_modified_at -> Timestamp, + } +} + diesel::allow_tables_to_appear_in_same_query!( address, api_keys, @@ -929,4 +974,6 @@ diesel::allow_tables_to_appear_in_same_query!( refund, reverse_lookup, routing_algorithm, + user_roles, + users, ); diff --git a/crates/diesel_models/src/user.rs b/crates/diesel_models/src/user.rs new file mode 100644 index 000000000000..6a2e864b291c --- /dev/null +++ b/crates/diesel_models/src/user.rs @@ -0,0 +1,76 @@ +use common_utils::pii; +use diesel::{AsChangeset, Identifiable, Insertable, Queryable}; +use masking::Secret; +use time::PrimitiveDateTime; + +use crate::schema::users; + +#[derive(Clone, Debug, Identifiable, Queryable)] +#[diesel(table_name = users)] +pub struct User { + pub id: i32, + pub user_id: String, + pub email: pii::Email, + pub name: Secret, + pub password: Secret, + pub is_verified: bool, + pub created_at: PrimitiveDateTime, + pub last_modified_at: PrimitiveDateTime, +} + +#[derive( + router_derive::Setter, Clone, Debug, Default, Insertable, router_derive::DebugAsDisplay, +)] +#[diesel(table_name = users)] +pub struct UserNew { + pub user_id: String, + pub email: pii::Email, + pub name: Secret, + pub password: Secret, + pub is_verified: bool, + pub created_at: Option, + pub last_modified_at: Option, +} + +#[derive(Clone, Debug, AsChangeset, router_derive::DebugAsDisplay)] +#[diesel(table_name = users)] +pub struct UserUpdateInternal { + name: Option, + password: Option>, + is_verified: Option, + last_modified_at: PrimitiveDateTime, +} + +#[derive(Debug)] +pub enum UserUpdate { + VerifyUser, + AccountUpdate { + name: Option, + password: Option>, + is_verified: Option, + }, +} + +impl From for UserUpdateInternal { + fn from(user_update: UserUpdate) -> Self { + let last_modified_at = common_utils::date_time::now(); + match user_update { + UserUpdate::VerifyUser => Self { + name: None, + password: None, + is_verified: Some(true), + last_modified_at, + }, + UserUpdate::AccountUpdate { + name, + password, + is_verified, + } => Self { + name, + password, + is_verified, + last_modified_at, + }, + } + } +} diff --git a/crates/diesel_models/src/user_role.rs b/crates/diesel_models/src/user_role.rs new file mode 100644 index 000000000000..467584ac59db --- /dev/null +++ b/crates/diesel_models/src/user_role.rs @@ -0,0 +1,79 @@ +use diesel::{AsChangeset, Identifiable, Insertable, Queryable}; +use time::PrimitiveDateTime; + +use crate::{enums, schema::user_roles}; + +#[derive(Clone, Debug, Identifiable, Queryable)] +#[diesel(table_name = user_roles)] +pub struct UserRole { + pub id: i32, + pub user_id: String, 
+ pub merchant_id: String, + pub role_id: String, + pub org_id: String, + pub status: enums::UserStatus, + pub created_by: String, + pub last_modified_by: String, + pub created_at: PrimitiveDateTime, + pub last_modified_at: PrimitiveDateTime, +} + +#[derive(router_derive::Setter, Clone, Debug, Insertable, router_derive::DebugAsDisplay)] +#[diesel(table_name = user_roles)] +pub struct UserRoleNew { + pub user_id: String, + pub merchant_id: String, + pub role_id: String, + pub org_id: String, + pub status: enums::UserStatus, + pub created_by: String, + pub last_modified_by: String, + pub created_at: PrimitiveDateTime, + pub last_modified_at: PrimitiveDateTime, +} + +#[derive(Clone, Debug, AsChangeset, router_derive::DebugAsDisplay)] +#[diesel(table_name = user_roles)] +pub struct UserRoleUpdateInternal { + role_id: Option, + status: Option, + last_modified_by: Option, + last_modified_at: PrimitiveDateTime, +} + +pub enum UserRoleUpdate { + UpdateStatus { + status: enums::UserStatus, + modified_by: String, + }, + UpdateRole { + role_id: String, + modified_by: String, + }, +} + +impl From for UserRoleUpdateInternal { + fn from(value: UserRoleUpdate) -> Self { + let last_modified_at = common_utils::date_time::now(); + match value { + UserRoleUpdate::UpdateRole { + role_id, + modified_by, + } => Self { + role_id: Some(role_id), + last_modified_by: Some(modified_by), + status: None, + last_modified_at, + }, + UserRoleUpdate::UpdateStatus { + status, + modified_by, + } => Self { + status: Some(status), + last_modified_at, + last_modified_by: Some(modified_by), + role_id: None, + }, + } + } +} diff --git a/crates/router/src/db.rs b/crates/router/src/db.rs index 6fe34d8dd69b..9687f7f97c92 100644 --- a/crates/router/src/db.rs +++ b/crates/router/src/db.rs @@ -25,6 +25,8 @@ pub mod payouts; pub mod refund; pub mod reverse_lookup; pub mod routing_algorithm; +pub mod user; +pub mod user_role; use data_models::payments::{ payment_attempt::PaymentAttemptInterface, payment_intent::PaymentIntentInterface, @@ -80,6 +82,8 @@ pub trait StorageInterface: + organization::OrganizationInterface + routing_algorithm::RoutingAlgorithmInterface + gsm::GsmInterface + + user::UserInterface + + user_role::UserRoleInterface + 'static { fn get_scheduler_db(&self) -> Box; diff --git a/crates/router/src/db/user.rs b/crates/router/src/db/user.rs new file mode 100644 index 000000000000..6bb1d9e50b6a --- /dev/null +++ b/crates/router/src/db/user.rs @@ -0,0 +1,265 @@ +use diesel_models::user as storage; +use error_stack::{IntoReport, ResultExt}; +use masking::Secret; + +use super::MockDb; +use crate::{ + connection, + core::errors::{self, CustomResult}, + services::Store, +}; + +#[async_trait::async_trait] +pub trait UserInterface { + async fn insert_user( + &self, + user_data: storage::UserNew, + ) -> CustomResult; + + async fn find_user_by_email( + &self, + user_email: &str, + ) -> CustomResult; + + async fn find_user_by_id( + &self, + user_id: &str, + ) -> CustomResult; + + async fn update_user_by_user_id( + &self, + user_id: &str, + user: storage::UserUpdate, + ) -> CustomResult; + + async fn delete_user_by_user_id( + &self, + user_id: &str, + ) -> CustomResult; +} + +#[async_trait::async_trait] +impl UserInterface for Store { + async fn insert_user( + &self, + user_data: storage::UserNew, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + user_data + .insert(&conn) + .await + .map_err(Into::into) + .into_report() + } + + async fn find_user_by_email( + &self, + user_email: &str, + ) -> 
CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::User::find_by_user_email(&conn, user_email) + .await + .map_err(Into::into) + .into_report() + } + + async fn find_user_by_id( + &self, + user_id: &str, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::User::find_by_user_id(&conn, user_id) + .await + .map_err(Into::into) + .into_report() + } + + async fn update_user_by_user_id( + &self, + user_id: &str, + user: storage::UserUpdate, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::User::update_by_user_id(&conn, user_id, user) + .await + .map_err(Into::into) + .into_report() + } + + async fn delete_user_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::User::delete_by_user_id(&conn, user_id) + .await + .map_err(Into::into) + .into_report() + } +} + +#[async_trait::async_trait] +impl UserInterface for MockDb { + async fn insert_user( + &self, + user_data: storage::UserNew, + ) -> CustomResult { + let mut users = self.users.lock().await; + if users + .iter() + .any(|user| user.email == user_data.email || user.user_id == user_data.user_id) + { + Err(errors::StorageError::DuplicateValue { + entity: "email or user_id", + key: None, + })? + } + let time_now = common_utils::date_time::now(); + let user = storage::User { + id: users + .len() + .try_into() + .into_report() + .change_context(errors::StorageError::MockDbError)?, + user_id: user_data.user_id, + email: user_data.email, + name: user_data.name, + password: user_data.password, + is_verified: user_data.is_verified, + created_at: user_data.created_at.unwrap_or(time_now), + last_modified_at: user_data.created_at.unwrap_or(time_now), + }; + users.push(user.clone()); + Ok(user) + } + + async fn find_user_by_email( + &self, + user_email: &str, + ) -> CustomResult { + let users = self.users.lock().await; + let user_email_pii: common_utils::pii::Email = user_email + .to_string() + .try_into() + .map_err(|_| errors::StorageError::MockDbError)?; + users + .iter() + .find(|user| user.email == user_email_pii) + .cloned() + .ok_or( + errors::StorageError::ValueNotFound(format!( + "No user available for email = {user_email}" + )) + .into(), + ) + } + + async fn find_user_by_id( + &self, + user_id: &str, + ) -> CustomResult { + let users = self.users.lock().await; + users + .iter() + .find(|user| user.user_id == user_id) + .cloned() + .ok_or( + errors::StorageError::ValueNotFound(format!( + "No user available for user_id = {user_id}" + )) + .into(), + ) + } + + async fn update_user_by_user_id( + &self, + user_id: &str, + update_user: storage::UserUpdate, + ) -> CustomResult { + let mut users = self.users.lock().await; + users + .iter_mut() + .find(|user| user.user_id == user_id) + .map(|user| { + *user = match &update_user { + storage::UserUpdate::VerifyUser => storage::User { + is_verified: true, + ..user.to_owned() + }, + storage::UserUpdate::AccountUpdate { + name, + password, + is_verified, + } => storage::User { + name: name.clone().map(Secret::new).unwrap_or(user.name.clone()), + password: password.clone().unwrap_or(user.password.clone()), + is_verified: is_verified.unwrap_or(user.is_verified), + ..user.to_owned() + }, + }; + user.to_owned() + }) + .ok_or( + errors::StorageError::ValueNotFound(format!( + "No user available for user_id = {user_id}" + )) + .into(), + ) + } + + async fn delete_user_by_user_id( + &self, + user_id: &str, + ) -> CustomResult 
{ + let mut users = self.users.lock().await; + let user_index = users + .iter() + .position(|user| user.user_id == user_id) + .ok_or(errors::StorageError::ValueNotFound(format!( + "No user available for user_id = {user_id}" + )))?; + users.remove(user_index); + Ok(true) + } +} +#[cfg(feature = "kafka_events")] +#[async_trait::async_trait] +impl UserInterface for super::KafkaStore { + async fn insert_user( + &self, + user_data: storage::UserNew, + ) -> CustomResult { + self.diesel_store.insert_user(user_data).await + } + + async fn find_user_by_email( + &self, + user_email: &str, + ) -> CustomResult { + self.diesel_store.find_user_by_email(user_email).await + } + + async fn find_user_by_id( + &self, + user_id: &str, + ) -> CustomResult { + self.diesel_store.find_user_by_id(user_id).await + } + + async fn update_user_by_user_id( + &self, + user_id: &str, + user: storage::UserUpdate, + ) -> CustomResult { + self.diesel_store + .update_user_by_user_id(user_id, user) + .await + } + + async fn delete_user_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + self.diesel_store.delete_user_by_user_id(user_id).await + } +} diff --git a/crates/router/src/db/user_role.rs b/crates/router/src/db/user_role.rs new file mode 100644 index 000000000000..37e38e8afca7 --- /dev/null +++ b/crates/router/src/db/user_role.rs @@ -0,0 +1,255 @@ +use diesel_models::user_role as storage; +use error_stack::{IntoReport, ResultExt}; + +use super::MockDb; +use crate::{ + connection, + core::errors::{self, CustomResult}, + services::Store, +}; + +#[async_trait::async_trait] +pub trait UserRoleInterface { + async fn insert_user_role( + &self, + user_role: storage::UserRoleNew, + ) -> CustomResult; + async fn find_user_role_by_user_id( + &self, + user_id: &str, + ) -> CustomResult; + async fn update_user_role_by_user_id_merchant_id( + &self, + user_id: &str, + merchant_id: &str, + update: storage::UserRoleUpdate, + ) -> CustomResult; + async fn delete_user_role(&self, user_id: &str) -> CustomResult; + + async fn list_user_roles_by_user_id( + &self, + user_id: &str, + ) -> CustomResult, errors::StorageError>; +} + +#[async_trait::async_trait] +impl UserRoleInterface for Store { + async fn insert_user_role( + &self, + user_role: storage::UserRoleNew, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + user_role + .insert(&conn) + .await + .map_err(Into::into) + .into_report() + } + + async fn find_user_role_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::UserRole::find_by_user_id(&conn, user_id.to_owned()) + .await + .map_err(Into::into) + .into_report() + } + + async fn update_user_role_by_user_id_merchant_id( + &self, + user_id: &str, + merchant_id: &str, + update: storage::UserRoleUpdate, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::UserRole::update_by_user_id_merchant_id( + &conn, + user_id.to_owned(), + merchant_id.to_owned(), + update, + ) + .await + .map_err(Into::into) + .into_report() + } + + async fn delete_user_role(&self, user_id: &str) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + storage::UserRole::delete_by_user_id(&conn, user_id.to_owned()) + .await + .map_err(Into::into) + .into_report() + } + + async fn list_user_roles_by_user_id( + &self, + user_id: &str, + ) -> CustomResult, errors::StorageError> { + let conn = connection::pg_connection_write(self).await?; + storage::UserRole::list_by_user_id(&conn, 
user_id.to_owned()) + .await + .map_err(Into::into) + .into_report() + } +} + +#[async_trait::async_trait] +impl UserRoleInterface for MockDb { + async fn insert_user_role( + &self, + user_role: storage::UserRoleNew, + ) -> CustomResult { + let mut user_roles = self.user_roles.lock().await; + if user_roles + .iter() + .any(|user_role_inner| user_role_inner.user_id == user_role.user_id) + { + Err(errors::StorageError::DuplicateValue { + entity: "user_id", + key: None, + })? + } + let user_role = storage::UserRole { + id: user_roles + .len() + .try_into() + .into_report() + .change_context(errors::StorageError::MockDbError)?, + user_id: user_role.user_id, + merchant_id: user_role.merchant_id, + role_id: user_role.role_id, + status: user_role.status, + created_by: user_role.created_by, + created_at: user_role.created_at, + last_modified_at: user_role.last_modified_at, + last_modified_by: user_role.last_modified_by, + org_id: user_role.org_id, + }; + user_roles.push(user_role.clone()); + Ok(user_role) + } + + async fn find_user_role_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + let user_roles = self.user_roles.lock().await; + user_roles + .iter() + .find(|user_role| user_role.user_id == user_id) + .cloned() + .ok_or( + errors::StorageError::ValueNotFound(format!( + "No user role available for user_id = {user_id}" + )) + .into(), + ) + } + + async fn update_user_role_by_user_id_merchant_id( + &self, + user_id: &str, + merchant_id: &str, + update: storage::UserRoleUpdate, + ) -> CustomResult { + let mut user_roles = self.user_roles.lock().await; + user_roles + .iter_mut() + .find(|user_role| user_role.user_id == user_id && user_role.merchant_id == merchant_id) + .map(|user_role| { + *user_role = match &update { + storage::UserRoleUpdate::UpdateRole { + role_id, + modified_by, + } => storage::UserRole { + role_id: role_id.to_string(), + last_modified_by: modified_by.to_string(), + ..user_role.to_owned() + }, + storage::UserRoleUpdate::UpdateStatus { + status, + modified_by, + } => storage::UserRole { + status: status.to_owned(), + last_modified_by: modified_by.to_owned(), + ..user_role.to_owned() + }, + }; + user_role.to_owned() + }) + .ok_or( + errors::StorageError::ValueNotFound(format!( + "No user role available for user_id = {user_id} and merchant_id = {merchant_id}" + )) + .into(), + ) + } + + async fn delete_user_role(&self, user_id: &str) -> CustomResult { + let mut user_roles = self.user_roles.lock().await; + let user_role_index = user_roles + .iter() + .position(|user_role| user_role.user_id == user_id) + .ok_or(errors::StorageError::ValueNotFound(format!( + "No user available for user_id = {user_id}" + )))?; + user_roles.remove(user_role_index); + Ok(true) + } + + async fn list_user_roles_by_user_id( + &self, + user_id: &str, + ) -> CustomResult, errors::StorageError> { + let user_roles = self.user_roles.lock().await; + + Ok(user_roles + .iter() + .cloned() + .filter_map(|ele| { + if ele.user_id == user_id { + return Some(ele); + } + None + }) + .collect()) + } +} + +#[cfg(feature = "kafka_events")] +#[async_trait::async_trait] +impl UserRoleInterface for super::KafkaStore { + async fn insert_user_role( + &self, + user_role: storage::UserRoleNew, + ) -> CustomResult { + self.diesel_store.insert_user_role(user_role).await + } + async fn update_user_role_by_user_id_merchant_id( + &self, + user_id: &str, + merchant_id: &str, + update: storage::UserRoleUpdate, + ) -> CustomResult { + self.diesel_store + .update_user_role_by_user_id_merchant_id(user_id, merchant_id, update) + 
.await + } + async fn find_user_role_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + self.diesel_store.find_user_role_by_user_id(user_id).await + } + async fn delete_user_role(&self, user_id: &str) -> CustomResult { + self.diesel_store.delete_user_role(user_id).await + } + async fn list_user_roles_by_user_id( + &self, + user_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store.list_user_roles_by_user_id(user_id).await + } +} diff --git a/crates/router/src/types/storage.rs b/crates/router/src/types/storage.rs index c63ff5fb7f86..e3e19323357b 100644 --- a/crates/router/src/types/storage.rs +++ b/crates/router/src/types/storage.rs @@ -32,6 +32,8 @@ pub mod payout_attempt; pub mod payouts; mod query; pub mod refund; +pub mod user; +pub mod user_role; pub use data_models::payments::{ payment_attempt::{PaymentAttempt, PaymentAttemptNew, PaymentAttemptUpdate}, @@ -44,7 +46,7 @@ pub use self::{ ephemeral_key::*, events::*, file::*, gsm::*, locker_mock_up::*, mandate::*, merchant_account::*, merchant_connector_account::*, merchant_key_store::*, payment_link::*, payment_method::*, payout_attempt::*, payouts::*, process_tracker::*, refund::*, - reverse_lookup::*, routing_algorithm::*, + reverse_lookup::*, routing_algorithm::*, user::*, user_role::*, }; use crate::types::api::routing; diff --git a/crates/router/src/types/storage/user.rs b/crates/router/src/types/storage/user.rs new file mode 100644 index 000000000000..17dc9d365243 --- /dev/null +++ b/crates/router/src/types/storage/user.rs @@ -0,0 +1 @@ +pub use diesel_models::user::*; diff --git a/crates/router/src/types/storage/user_role.rs b/crates/router/src/types/storage/user_role.rs new file mode 100644 index 000000000000..780b9b2971db --- /dev/null +++ b/crates/router/src/types/storage/user_role.rs @@ -0,0 +1 @@ +pub use diesel_models::user_role::*; diff --git a/crates/storage_impl/src/mock_db.rs b/crates/storage_impl/src/mock_db.rs index 33f3f7a77f27..4cdf8e2456bb 100644 --- a/crates/storage_impl/src/mock_db.rs +++ b/crates/storage_impl/src/mock_db.rs @@ -41,6 +41,8 @@ pub struct MockDb { pub reverse_lookups: Arc>>, pub payment_link: Arc>>, pub organizations: Arc>>, + pub users: Arc>>, + pub user_roles: Arc>>, } impl MockDb { @@ -74,6 +76,8 @@ impl MockDb { reverse_lookups: Default::default(), payment_link: Default::default(), organizations: Default::default(), + users: Default::default(), + user_roles: Default::default(), }) } } diff --git a/migrations/2023-11-06-110233_create_user_table/down.sql b/migrations/2023-11-06-110233_create_user_table/down.sql new file mode 100644 index 000000000000..0172a87499bb --- /dev/null +++ b/migrations/2023-11-06-110233_create_user_table/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE users; \ No newline at end of file diff --git a/migrations/2023-11-06-110233_create_user_table/up.sql b/migrations/2023-11-06-110233_create_user_table/up.sql new file mode 100644 index 000000000000..410436c461ce --- /dev/null +++ b/migrations/2023-11-06-110233_create_user_table/up.sql @@ -0,0 +1,14 @@ +-- Your SQL goes here +CREATE TABLE IF NOT EXISTS users ( + id SERIAL PRIMARY KEY, + user_id VARCHAR(64) NOT NULL UNIQUE, + email VARCHAR(255) NOT NULL UNIQUE, + name VARCHAR(255) NOT NULL, + password VARCHAR(255) NOT NULL, + is_verified bool NOT NULL DEFAULT false, + created_at TIMESTAMP NOT NULL DEFAULT now(), + last_modified_at TIMESTAMP NOT NULL DEFAULT now() +); + +CREATE UNIQUE INDEX IF NOT EXISTS user_id_index ON users (user_id); +CREATE UNIQUE INDEX 
IF NOT EXISTS user_email_index ON users (email); \ No newline at end of file diff --git a/migrations/2023-11-06-113726_create_user_roles_table/down.sql b/migrations/2023-11-06-113726_create_user_roles_table/down.sql new file mode 100644 index 000000000000..5e6350de9e70 --- /dev/null +++ b/migrations/2023-11-06-113726_create_user_roles_table/down.sql @@ -0,0 +1,4 @@ +-- This file should undo anything in `up.sql` + +-- Drop the table +DROP TABLE IF EXISTS user_roles; \ No newline at end of file diff --git a/migrations/2023-11-06-113726_create_user_roles_table/up.sql b/migrations/2023-11-06-113726_create_user_roles_table/up.sql new file mode 100644 index 000000000000..768306721626 --- /dev/null +++ b/migrations/2023-11-06-113726_create_user_roles_table/up.sql @@ -0,0 +1,18 @@ +-- Your SQL goes here +CREATE TABLE IF NOT EXISTS user_roles ( + id SERIAL PRIMARY KEY, + user_id VARCHAR(64) NOT NULL, + merchant_id VARCHAR(64) NOT NULL, + role_id VARCHAR(64) NOT NULL, + org_id VARCHAR(64) NOT NULL, + status VARCHAR(64) NOT NULL, + created_by VARCHAR(64) NOT NULL, + last_modified_by VARCHAR(64) NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now(), + last_modified_at TIMESTAMP NOT NULL DEFAULT now(), + CONSTRAINT user_merchant_unique UNIQUE (user_id, merchant_id) +); + + +CREATE INDEX IF NOT EXISTS user_id_roles_index ON user_roles (user_id); +CREATE INDEX IF NOT EXISTS user_mid_roles_index ON user_roles (merchant_id); \ No newline at end of file From 2a4f5d13717a78dc2e2e4fc9a492a45b92151dbe Mon Sep 17 00:00:00 2001 From: Sahkal Poddar Date: Fri, 10 Nov 2023 14:39:32 +0530 Subject: [PATCH 2/5] feat(router): added Payment link new design (#2731) Co-authored-by: Sahkal Poddar Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Kashif <46213975+kashif-m@users.noreply.github.com> Co-authored-by: Kashif --- crates/api_models/src/admin.rs | 5 +- crates/api_models/src/payments.rs | 3 +- crates/common_enums/Cargo.toml | 4 +- crates/common_utils/src/consts.rs | 12 + crates/router/src/core/payment_link.rs | 98 +- .../src/core/payment_link/payment_link.html | 1274 +++++++++-------- openapi/openapi_spec.json | 8 +- 7 files changed, 737 insertions(+), 667 deletions(-) diff --git a/crates/api_models/src/admin.rs b/crates/api_models/src/admin.rs index e844d1900a1a..979214a071a9 100644 --- a/crates/api_models/src/admin.rs +++ b/crates/api_models/src/admin.rs @@ -463,9 +463,8 @@ pub struct PaymentLinkConfig { #[serde(deny_unknown_fields)] pub struct PaymentLinkColorSchema { - pub primary_color: Option, - pub primary_accent_color: Option, - pub secondary_color: Option, + pub background_primary_color: Option, + pub sdk_theme: Option, } #[derive(Clone, Debug, Deserialize, ToSchema, Serialize)] diff --git a/crates/api_models/src/payments.rs b/crates/api_models/src/payments.rs index 196dd108333b..22579ed6d6ea 100644 --- a/crates/api_models/src/payments.rs +++ b/crates/api_models/src/payments.rs @@ -3150,6 +3150,7 @@ pub struct PaymentLinkDetails { pub merchant_logo: String, pub return_url: String, pub merchant_name: String, - pub order_details: Vec, + pub order_details: Option>, pub max_items_visible_after_collapse: i8, + pub sdk_theme: Option, } diff --git a/crates/common_enums/Cargo.toml b/crates/common_enums/Cargo.toml index e9f2dffcc050..db37d27ab0f1 100644 --- a/crates/common_enums/Cargo.toml +++ b/crates/common_enums/Cargo.toml @@ -12,9 +12,9 @@ dummy_connector = [] [dependencies] diesel = { version = "2.1.0", features = ["postgres"] } -serde = { version = 
"1.0.160", features = [ "derive" ] } +serde = { version = "1.0.160", features = ["derive"] } serde_json = "1.0.96" -strum = { version = "0.25", features = [ "derive" ] } +strum = { version = "0.25", features = ["derive"] } time = { version = "0.3.21", features = ["serde", "serde-well-known", "std"] } utoipa = { version = "3.3.0", features = ["preserve_order"] } diff --git a/crates/common_utils/src/consts.rs b/crates/common_utils/src/consts.rs index 2f517295ae48..7bc248bf8d1b 100644 --- a/crates/common_utils/src/consts.rs +++ b/crates/common_utils/src/consts.rs @@ -29,3 +29,15 @@ pub const SURCHARGE_PERCENTAGE_PRECISION_LENGTH: u8 = 2; /// Header Key for application overhead of a request pub const X_HS_LATENCY: &str = "x-hs-latency"; + +/// SDK Default Theme const +pub const DEFAULT_SDK_THEME: &str = "#7EA8F6"; + +/// Default Payment Link Background color +pub const DEFAULT_BACKGROUND_COLOR: &str = "#E5E5E5"; + +/// Default product Img Link +pub const DEFAULT_PRODUCT_IMG: &str = "https://i.imgur.com/On3VtKF.png"; + +/// Default Merchant Logo Link +pub const DEFAULT_MERCHANT_LOGO: &str = "https://i.imgur.com/RfxPFQo.png"; diff --git a/crates/router/src/core/payment_link.rs b/crates/router/src/core/payment_link.rs index 0012efc86c9f..2ea6a4d7f219 100644 --- a/crates/router/src/core/payment_link.rs +++ b/crates/router/src/core/payment_link.rs @@ -1,6 +1,12 @@ use api_models::admin as admin_types; +use common_utils::{ + consts::{ + DEFAULT_BACKGROUND_COLOR, DEFAULT_MERCHANT_LOGO, DEFAULT_PRODUCT_IMG, DEFAULT_SDK_THEME, + }, + ext_traits::ValueExt, +}; use error_stack::{IntoReport, ResultExt}; -use masking::PeekInterface; +use masking::{PeekInterface, Secret}; use super::errors::{self, RouterResult, StorageErrorExt}; use crate::{ @@ -76,12 +82,7 @@ pub async fn intiate_payment_link_flow( }) .transpose()?; - let order_details = payment_intent - .order_details - .get_required_value("order_details") - .change_context(errors::ApiErrorResponse::MissingRequiredField { - field_name: "order_details", - })?; + let order_details = validate_order_details(payment_intent.order_details)?; let return_url = if let Some(payment_create_return_url) = payment_intent.return_url { payment_create_return_url @@ -99,6 +100,9 @@ pub async fn intiate_payment_link_flow( payment_intent.client_secret, )?; + let (default_sdk_theme, default_background_color) = + (DEFAULT_SDK_THEME, DEFAULT_BACKGROUND_COLOR); + let payment_details = api_models::payments::PaymentLinkDetails { amount: payment_intent.amount, currency, @@ -116,13 +120,25 @@ pub async fn intiate_payment_link_flow( client_secret, merchant_logo: payment_link_config .clone() - .map(|pl_metadata| pl_metadata.merchant_logo.unwrap_or_default()) + .map(|pl_config| { + pl_config + .merchant_logo + .unwrap_or(DEFAULT_MERCHANT_LOGO.to_string()) + }) .unwrap_or_default(), max_items_visible_after_collapse: 3, + sdk_theme: payment_link_config.clone().and_then(|pl_config| { + pl_config + .color_scheme + .map(|color| color.sdk_theme.unwrap_or(default_sdk_theme.to_string())) + }), }; let js_script = get_js_script(payment_details)?; - let css_script = get_color_scheme_css(payment_link_config.clone()); + let css_script = get_color_scheme_css( + payment_link_config.clone(), + default_background_color.to_string(), + ); let payment_link_data = services::PaymentLinkFormData { js_script, sdk_url: state.conf.payment_link.sdk_url.clone(), @@ -149,38 +165,21 @@ fn get_js_script( fn get_color_scheme_css( payment_link_config: Option, + default_primary_color: String, ) -> String { - let 
(default_primary_color, default_accent_color, default_secondary_color) = ( - "#C6C7C8".to_string(), - "#6A8EF5".to_string(), - "#0C48F6".to_string(), - ); - - let (primary_color, primary_accent_color, secondary_color) = payment_link_config + let background_primary_color = payment_link_config .and_then(|pl_config| { pl_config.color_scheme.map(|color| { - ( - color.primary_color.unwrap_or(default_primary_color.clone()), - color - .primary_accent_color - .unwrap_or(default_accent_color.clone()), - color - .secondary_color - .unwrap_or(default_secondary_color.clone()), - ) + color + .background_primary_color + .unwrap_or(default_primary_color.clone()) }) }) - .unwrap_or(( - default_primary_color, - default_accent_color, - default_secondary_color, - )); + .unwrap_or(default_primary_color); format!( ":root {{ - --primary-color: {primary_color}; - --primary-accent-color: {primary_accent_color}; - --secondary-color: {secondary_color}; + --primary-color: {background_primary_color}; }}" ) } @@ -203,3 +202,36 @@ fn validate_sdk_requirements( })?; Ok((pub_key, currency, client_secret)) } + +fn validate_order_details( + order_details: Option>>, +) -> Result< + Option>, + error_stack::Report, +> { + let order_details = order_details + .map(|order_details| { + order_details + .iter() + .map(|data| { + data.to_owned() + .parse_value("OrderDetailsWithAmount") + .change_context(errors::ApiErrorResponse::InvalidDataValue { + field_name: "OrderDetailsWithAmount", + }) + .attach_printable("Unable to parse OrderDetailsWithAmount") + }) + .collect::, _>>() + }) + .transpose()?; + + let updated_order_details = order_details.map(|mut order_details| { + for order in order_details.iter_mut() { + if order.product_img_link.is_none() { + order.product_img_link = Some(DEFAULT_PRODUCT_IMG.to_string()); + } + } + order_details + }); + Ok(updated_order_details) +} diff --git a/crates/router/src/core/payment_link/payment_link.html b/crates/router/src/core/payment_link/payment_link.html index 462a11d2567e..67410cac8418 100644 --- a/crates/router/src/core/payment_link/payment_link.html +++ b/crates/router/src/core/payment_link/payment_link.html @@ -112,8 +112,8 @@ } #hyper-checkout-merchant-image > img { - height: 48px; - width: 48px; + height: 40px; + width: 40px; } #hyper-checkout-cart-image { @@ -175,8 +175,8 @@ } .hyper-checkout-cart-product-image { - height: 72px; - width: 72px; + height: 56px; + width: 56px; } .hyper-checkout-card-item-name { @@ -234,13 +234,21 @@ background-color: var(--primary-color); box-shadow: 0px 1px 10px #f2f2f2; display: flex; + flex-flow: column; align-items: center; justify-content: center; } #payment-form-wrap { - min-width: 584px; - padding: 50px; + min-width: 300px; + width: 30vw; + padding: 20px; + background-color: white; + border-radius: 3px; + } + + .powered-by-hyper { + margin-top: 20px; } #hyper-checkout-sdk-header { @@ -295,28 +303,13 @@ margin-top: 10px; } - .checkoutButton { - height: 48px; - border-radius: 25px; - width: 100%; - border: transparent; - background: var(--secondary-color); - color: #ffffff; - font-weight: 600; - cursor: pointer; - } - .page-spinner, .page-spinner::before, - .page-spinner::after, - .spinner, - .spinner:before, - .spinner:after { + .page-spinner::after { border-radius: 50%; } - .page-spinner, - .spinner { + .page-spinner { color: #ffffff; font-size: 22px; text-indent: -99999px; @@ -331,9 +324,7 @@ } .page-spinner::before, - .page-spinner::after, - .spinner:before, - .spinner:after { + .page-spinner::after { position: absolute; content: ""; } @@ -405,19 
+396,6 @@ } } - .spinner:before { - width: 10.4px; - height: 20.4px; - background: var(--primary-color); - border-radius: 20.4px 0 0 20.4px; - top: -0.2px; - left: -0.2px; - -webkit-transform-origin: 10.4px 10.2px; - transform-origin: 10.4px 10.2px; - -webkit-animation: loading 2s infinite ease 1.5s; - animation: loading 2s infinite ease 1.5s; - } - #payment-message { font-size: 12px; font-weight: 500; @@ -426,19 +404,6 @@ font-family: "Montserrat"; } - .spinner:after { - width: 10.4px; - height: 10.2px; - background: var(--primary-color); - border-radius: 0 10.2px 10.2px 0; - top: -0.1px; - left: 10.2px; - -webkit-transform-origin: 0px 10.2px; - transform-origin: 0px 10.2px; - -webkit-animation: loading 2s infinite ease; - animation: loading 2s infinite ease; - } - #payment-form { max-width: 560px; width: 100%; @@ -447,11 +412,6 @@ } @media only screen and (max-width: 1200px) { - .checkoutButton { - width: 95%; - background-color: var(--primary-color); - } - .hyper-checkout { flex-flow: column; margin: 0; @@ -627,16 +587,16 @@ @@ -700,7 +660,7 @@
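// The handlers added below call small DOM helpers — show(), hide(),
// setPageLoading() and checkStatus() — that are defined in parts of this file
// the hunk does not show. A minimal sketch of what show()/hide() are assumed
// to do; the display values and the null-guard are illustrative, not the
// file's actual implementation:
function show(selector) {
  // Restore the matched element to the layout.
  const el = document.querySelector(selector);
  if (el) el.style.display = "flex";
}

function hide(selector) {
  // Take the matched element out of the layout entirely.
  const el = document.querySelector(selector);
  if (el) el.style.display = "none";
}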
- - + function showSDK(e) { + if (window.state.isMobileView) { + hide("#hyper-checkout-cart"); + } else { + show("#hyper-checkout-cart"); + } + setPageLoading(true); + checkStatus() + .then((res) => { + if (res.showSdk) { + renderPaymentDetails(); + renderCart(); + renderSDKHeader(); + show("#hyper-checkout-sdk"); + show("#hyper-checkout-details"); + } else { + show("#hyper-checkout-status"); + show("#hyper-footer"); + } + }) + .catch((err) => {}) + .finally(() => { + setPageLoading(false); + }); + } + + window.addEventListener("resize", (event) => { + const currentHeight = window.innerHeight; + const currentWidth = window.innerWidth; + if (currentWidth <= 1200 && window.state.prevWidth > 1200) { + hide("#hyper-checkout-cart"); + } else if (currentWidth > 1200 && window.state.prevWidth <= 1200) { + show("#hyper-checkout-cart"); + } + + window.state.prevHeight = currentHeight; + window.state.prevWidth = currentWidth; + window.state.isMobileView = currentWidth <= 1200; + }); + + diff --git a/openapi/openapi_spec.json b/openapi/openapi_spec.json index 6e61f2eb614e..23f8f1b3628b 100644 --- a/openapi/openapi_spec.json +++ b/openapi/openapi_spec.json @@ -7809,15 +7809,11 @@ "PaymentLinkColorSchema": { "type": "object", "properties": { - "primary_color": { + "background_primary_color": { "type": "string", "nullable": true }, - "primary_accent_color": { - "type": "string", - "nullable": true - }, - "secondary_color": { + "sdk_theme": { "type": "string", "nullable": true } From b5ea8db2d2b7e7544931704a7191b42d3a8299be Mon Sep 17 00:00:00 2001 From: Swangi Kumari <85639103+swangi-kumari@users.noreply.github.com> Date: Fri, 10 Nov 2023 16:38:30 +0530 Subject: [PATCH 3/5] refactor(connector): [Zen] change error message from NotSupported to NotImplemented (#2831) --- .../router/src/connector/zen/transformers.rs | 91 +++++++------------ 1 file changed, 32 insertions(+), 59 deletions(-) diff --git a/crates/router/src/connector/zen/transformers.rs b/crates/router/src/connector/zen/transformers.rs index d13c9b6421f4..6b0d46dec8d1 100644 --- a/crates/router/src/connector/zen/transformers.rs +++ b/crates/router/src/connector/zen/transformers.rs @@ -290,10 +290,9 @@ impl | api_models::payments::VoucherData::FamilyMart { .. } | api_models::payments::VoucherData::Seicomart { .. } | api_models::payments::VoucherData::PayEasy { .. } => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - })? + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ))? } }; Ok(Self::ApiRequest(Box::new(ApiRequest { @@ -342,12 +341,8 @@ impl api_models::payments::BankTransferData::Pse { .. } => { ZenPaymentChannels::PclBoacompraPse } - api_models::payments::BankTransferData::SepaBankTransfer { .. } => { - Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Zen"), - ))? - } - api_models::payments::BankTransferData::AchBankTransfer { .. } + api_models::payments::BankTransferData::SepaBankTransfer { .. } + | api_models::payments::BankTransferData::AchBankTransfer { .. } | api_models::payments::BankTransferData::BacsBankTransfer { .. } | api_models::payments::BankTransferData::PermataBankTransfer { .. } | api_models::payments::BankTransferData::BcaBankTransfer { .. } @@ -356,10 +351,9 @@ impl | api_models::payments::BankTransferData::CimbVaBankTransfer { .. } | api_models::payments::BankTransferData::DanamonVaBankTransfer { .. 
} | api_models::payments::BankTransferData::MandiriVaBankTransfer { .. } => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - })? + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ))? } }; Ok(Self::ApiRequest(Box::new(ApiRequest { @@ -489,12 +483,8 @@ impl api_models::payments::WalletData::WeChatPayRedirect(_) | api_models::payments::WalletData::PaypalRedirect(_) | api_models::payments::WalletData::ApplePay(_) - | api_models::payments::WalletData::GooglePay(_) => { - Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Zen"), - ))? - } - api_models::payments::WalletData::AliPayQr(_) + | api_models::payments::WalletData::GooglePay(_) + | api_models::payments::WalletData::AliPayQr(_) | api_models::payments::WalletData::AliPayRedirect(_) | api_models::payments::WalletData::AliPayHkRedirect(_) | api_models::payments::WalletData::MomoRedirect(_) @@ -514,10 +504,9 @@ impl | api_models::payments::WalletData::CashappQr(_) | api_models::payments::WalletData::SwishQr(_) | api_models::payments::WalletData::WeChatPayQr(_) => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - })? + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ))? } }; let terminal_uuid = session_data @@ -719,10 +708,9 @@ impl TryFrom<&ZenRouterData<&types::PaymentsAuthorizeRouterData>> for ZenPayment | api_models::payments::PaymentMethodData::MandatePayment | api_models::payments::PaymentMethodData::Reward | api_models::payments::PaymentMethodData::Upi(_) => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - })? + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ))? } } } @@ -736,13 +724,8 @@ impl TryFrom<&api_models::payments::BankRedirectData> for ZenPaymentsRequest { | api_models::payments::BankRedirectData::Sofort { .. } | api_models::payments::BankRedirectData::BancontactCard { .. } | api_models::payments::BankRedirectData::Blik { .. } - | api_models::payments::BankRedirectData::Trustly { .. } => { - Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Zen"), - ) - .into()) - } - api_models::payments::BankRedirectData::Eps { .. } + | api_models::payments::BankRedirectData::Trustly { .. } + | api_models::payments::BankRedirectData::Eps { .. } | api_models::payments::BankRedirectData::Giropay { .. } | api_models::payments::BankRedirectData::Przelewy24 { .. } | api_models::payments::BankRedirectData::Bizum {} @@ -754,10 +737,9 @@ impl TryFrom<&api_models::payments::BankRedirectData> for ZenPaymentsRequest { | api_models::payments::BankRedirectData::OpenBankingUk { .. } | api_models::payments::BankRedirectData::OnlineBankingFpx { .. } | api_models::payments::BankRedirectData::OnlineBankingThailand { .. 
} => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ) .into()) } } @@ -776,10 +758,9 @@ impl TryFrom<&api_models::payments::PayLaterData> for ZenPaymentsRequest { | api_models::payments::PayLaterData::WalleyRedirect {} | api_models::payments::PayLaterData::AlmaRedirect {} | api_models::payments::PayLaterData::AtomeRedirect {} => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ) .into()) } } @@ -794,10 +775,9 @@ impl TryFrom<&api_models::payments::BankDebitData> for ZenPaymentsRequest { | api_models::payments::BankDebitData::SepaBankDebit { .. } | api_models::payments::BankDebitData::BecsBankDebit { .. } | api_models::payments::BankDebitData::BacsBankDebit { .. } => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ) .into()) } } @@ -811,10 +791,9 @@ impl TryFrom<&api_models::payments::CardRedirectData> for ZenPaymentsRequest { api_models::payments::CardRedirectData::Knet {} | api_models::payments::CardRedirectData::Benefit {} | api_models::payments::CardRedirectData::MomoAtm {} => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Zen"), + ) .into()) } } @@ -825,19 +804,13 @@ impl TryFrom<&api_models::payments::GiftCardData> for ZenPaymentsRequest { type Error = error_stack::Report; fn try_from(value: &api_models::payments::GiftCardData) -> Result { match value { - api_models::payments::GiftCardData::PaySafeCard {} => { + api_models::payments::GiftCardData::PaySafeCard {} + | api_models::payments::GiftCardData::Givex(_) => { Err(errors::ConnectorError::NotImplemented( utils::get_unimplemented_payment_method_error_message("Zen"), ) .into()) } - api_models::payments::GiftCardData::Givex(_) => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Zen", - } - .into()) - } } } } From f847802339bfedb24cbaa47ad55e31d80cefddca Mon Sep 17 00:00:00 2001 From: ivor-juspay <138492857+ivor-juspay@users.noreply.github.com> Date: Fri, 10 Nov 2023 17:08:09 +0530 Subject: [PATCH 4/5] feat(analytics): analytics APIs (#2792) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Sampras Lopes --- Cargo.lock | 281 ++++++++- config/config.example.toml | 14 +- config/docker_compose.toml | 11 + crates/api_models/src/analytics.rs | 152 +++++ crates/api_models/src/analytics/payments.rs | 180 ++++++ crates/api_models/src/analytics/refunds.rs | 183 ++++++ crates/api_models/src/lib.rs | 1 + crates/common_utils/src/custom_serde.rs | 48 ++ crates/router/Cargo.toml | 5 +- crates/router/src/analytics.rs | 129 +++++ crates/router/src/analytics/core.rs | 96 ++++ crates/router/src/analytics/errors.rs | 32 ++ crates/router/src/analytics/metrics.rs | 9 + .../router/src/analytics/metrics/request.rs | 60 ++ crates/router/src/analytics/payments.rs | 13 + 
.../src/analytics/payments/accumulator.rs | 150 +++++ crates/router/src/analytics/payments/core.rs | 129 +++++ .../router/src/analytics/payments/filters.rs | 58 ++ .../router/src/analytics/payments/metrics.rs | 137 +++++ .../payments/metrics/avg_ticket_size.rs | 126 +++++ .../payments/metrics/payment_count.rs | 117 ++++ .../metrics/payment_processed_amount.rs | 128 +++++ .../payments/metrics/payment_success_count.rs | 127 +++++ .../payments/metrics/success_rate.rs | 123 ++++ crates/router/src/analytics/payments/types.rs | 46 ++ crates/router/src/analytics/query.rs | 533 ++++++++++++++++++ crates/router/src/analytics/refunds.rs | 10 + .../src/analytics/refunds/accumulator.rs | 110 ++++ crates/router/src/analytics/refunds/core.rs | 104 ++++ .../router/src/analytics/refunds/filters.rs | 59 ++ .../router/src/analytics/refunds/metrics.rs | 126 +++++ .../analytics/refunds/metrics/refund_count.rs | 116 ++++ .../metrics/refund_processed_amount.rs | 122 ++++ .../refunds/metrics/refund_success_count.rs | 122 ++++ .../refunds/metrics/refund_success_rate.rs | 117 ++++ crates/router/src/analytics/refunds/types.rs | 41 ++ crates/router/src/analytics/routes.rs | 145 +++++ crates/router/src/analytics/sqlx.rs | 401 +++++++++++++ crates/router/src/analytics/types.rs | 119 ++++ crates/router/src/analytics/utils.rs | 22 + crates/router/src/configs/settings.rs | 4 + crates/router/src/lib.rs | 3 + crates/router/src/routes.rs | 2 + crates/router/src/routes/app.rs | 12 + crates/router_env/src/lib.rs | 19 +- crates/router_env/src/metrics.rs | 19 + loadtest/config/development.toml | 12 + 47 files changed, 4559 insertions(+), 14 deletions(-) create mode 100644 crates/api_models/src/analytics.rs create mode 100644 crates/api_models/src/analytics/payments.rs create mode 100644 crates/api_models/src/analytics/refunds.rs create mode 100644 crates/router/src/analytics.rs create mode 100644 crates/router/src/analytics/core.rs create mode 100644 crates/router/src/analytics/errors.rs create mode 100644 crates/router/src/analytics/metrics.rs create mode 100644 crates/router/src/analytics/metrics/request.rs create mode 100644 crates/router/src/analytics/payments.rs create mode 100644 crates/router/src/analytics/payments/accumulator.rs create mode 100644 crates/router/src/analytics/payments/core.rs create mode 100644 crates/router/src/analytics/payments/filters.rs create mode 100644 crates/router/src/analytics/payments/metrics.rs create mode 100644 crates/router/src/analytics/payments/metrics/avg_ticket_size.rs create mode 100644 crates/router/src/analytics/payments/metrics/payment_count.rs create mode 100644 crates/router/src/analytics/payments/metrics/payment_processed_amount.rs create mode 100644 crates/router/src/analytics/payments/metrics/payment_success_count.rs create mode 100644 crates/router/src/analytics/payments/metrics/success_rate.rs create mode 100644 crates/router/src/analytics/payments/types.rs create mode 100644 crates/router/src/analytics/query.rs create mode 100644 crates/router/src/analytics/refunds.rs create mode 100644 crates/router/src/analytics/refunds/accumulator.rs create mode 100644 crates/router/src/analytics/refunds/core.rs create mode 100644 crates/router/src/analytics/refunds/filters.rs create mode 100644 crates/router/src/analytics/refunds/metrics.rs create mode 100644 crates/router/src/analytics/refunds/metrics/refund_count.rs create mode 100644 crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs create mode 100644 
crates/router/src/analytics/refunds/metrics/refund_success_count.rs create mode 100644 crates/router/src/analytics/refunds/metrics/refund_success_rate.rs create mode 100644 crates/router/src/analytics/refunds/types.rs create mode 100644 crates/router/src/analytics/routes.rs create mode 100644 crates/router/src/analytics/sqlx.rs create mode 100644 crates/router/src/analytics/types.rs create mode 100644 crates/router/src/analytics/utils.rs diff --git a/Cargo.lock b/Cargo.lock index ac7fde55d8e3..c96ce2c18258 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,7 +19,7 @@ dependencies = [ "futures-util", "log", "once_cell", - "parking_lot", + "parking_lot 0.12.1", "pin-project-lite", "smallvec", "tokio", @@ -361,6 +361,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -590,6 +596,15 @@ dependencies = [ "syn 2.0.38", ] +[[package]] +name = "atoi" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic" version = "0.5.3" @@ -1139,10 +1154,21 @@ dependencies = [ "async-trait", "futures-channel", "futures-util", - "parking_lot", + "parking_lot 0.12.1", "tokio", ] +[[package]] +name = "bigdecimal" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + [[package]] name = "bincode" version = "1.3.3" @@ -1632,6 +1658,21 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" + [[package]] name = "crc16" version = "0.4.0" @@ -1726,6 +1767,16 @@ dependencies = [ "scopeguard", ] +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.16" @@ -1825,7 +1876,7 @@ dependencies = [ "hashbrown 0.14.1", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.8", ] [[package]] @@ -2048,6 +2099,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257" +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "drainer" version = "0.1.0" @@ -2269,6 +2326,12 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + [[package]] name = "flate2" version = "1.0.27" @@ -2334,7 +2397,7 @@ dependencies = [ "futures", "lazy_static", "log", - "parking_lot", + "parking_lot 0.12.1", "rand 0.8.5", "redis-protocol", "semver", @@ -2441,6 +2504,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot 0.11.2", +] + [[package]] name = "futures-io" version = "0.3.28" @@ -2651,12 +2725,28 @@ name = "hashbrown" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +dependencies = [ + "ahash 0.8.3", + "allocator-api2", +] + +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.1", +] [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +dependencies = [ + "unicode-segmentation", +] [[package]] name = "hermit-abi" @@ -2670,6 +2760,15 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hkdf" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -3377,7 +3476,7 @@ dependencies = [ "crossbeam-utils", "futures-util", "once_cell", - "parking_lot", + "parking_lot 0.12.1", "quanta", "rustc_version", "scheduled-thread-pool", @@ -3692,6 +3791,17 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e52c774a4c39359c1d1c52e43f73dd91a75a614652c825408eec30c95a9b2067" +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -3699,7 +3809,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -4115,7 +4239,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ "log", - "parking_lot", + "parking_lot 0.12.1", "scheduled-thread-pool", ] @@ -4445,10 +4569,12 @@ dependencies = [ "aws-sdk-s3", "base64 0.21.4", 
"bb8", + "bigdecimal", "blake3", "bytes", "cards", "clap", + "common_enums", "common_utils", "config", "data_models", @@ -4501,6 +4627,7 @@ dependencies = [ "sha-1 0.9.8", "signal-hook", "signal-hook-tokio", + "sqlx", "storage_impl", "strum 0.24.1", "tera", @@ -4774,7 +4901,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" dependencies = [ - "parking_lot", + "parking_lot 0.12.1", ] [[package]] @@ -5012,7 +5139,7 @@ dependencies = [ "futures", "lazy_static", "log", - "parking_lot", + "parking_lot 0.12.1", "serial_test_derive", ] @@ -5205,6 +5332,111 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +[[package]] +name = "sqlformat" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" +dependencies = [ + "itertools 0.11.0", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188" +dependencies = [ + "sqlx-core", + "sqlx-macros", +] + +[[package]] +name = "sqlx-core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" +dependencies = [ + "ahash 0.7.6", + "atoi", + "base64 0.13.1", + "bigdecimal", + "bitflags 1.3.2", + "byteorder", + "bytes", + "crc", + "crossbeam-queue", + "dirs", + "dotenvy", + "either", + "event-listener", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-util", + "hashlink", + "hex", + "hkdf", + "hmac", + "indexmap 1.9.3", + "itoa", + "libc", + "log", + "md-5", + "memchr", + "num-bigint", + "once_cell", + "paste", + "percent-encoding", + "rand 0.8.5", + "serde", + "serde_json", + "sha1", + "sha2", + "smallvec", + "sqlformat", + "sqlx-rt", + "stringprep", + "thiserror", + "time", + "tokio-stream", + "url", + "whoami", +] + +[[package]] +name = "sqlx-macros" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9" +dependencies = [ + "dotenvy", + "either", + "heck", + "once_cell", + "proc-macro2", + "quote", + "sha2", + "sqlx-core", + "sqlx-rt", + "syn 1.0.109", + "url", +] + +[[package]] +name = "sqlx-rt" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024" +dependencies = [ + "native-tls", + "once_cell", + "tokio", + "tokio-native-tls", +] + [[package]] name = "storage_impl" version = "0.1.0" @@ -5249,6 +5481,17 @@ dependencies = [ "regex", ] +[[package]] +name = "stringprep" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +dependencies = [ + "finl_unicode", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "strsim" version = "0.10.0" @@ -5483,7 +5726,7 @@ dependencies = [ "futures", "http", "log", - "parking_lot", + "parking_lot 0.12.1", "serde", "serde_json", "serde_repr", @@ -5611,7 +5854,7 @@ dependencies = [ "libc", "mio", "num_cpus", - "parking_lot", + "parking_lot 0.12.1", 
"pin-project-lite", "signal-hook-registry", "socket2 0.5.4", @@ -6040,6 +6283,12 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + [[package]] name = "unidecode" version = "0.3.0" @@ -6330,6 +6579,16 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" +[[package]] +name = "whoami" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" +dependencies = [ + "wasm-bindgen", + "web-sys", +] + [[package]] name = "winapi" version = "0.3.9" diff --git a/config/config.example.toml b/config/config.example.toml index ed9cf9698984..f0083bb48b19 100644 --- a/config/config.example.toml +++ b/config/config.example.toml @@ -434,10 +434,22 @@ apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" #Private apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE" #Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY" #Private key generate by RSA:2048 algorithm - [payment_link] sdk_url = "http://localhost:9090/dist/HyperLoader.js" +# Analytics configuration. +[analytics] +source = "sqlx" # The Analytics source/strategy to be used + +[analytics.sqlx] +username = "db_user" # Analytics DB Username +password = "db_pass" # Analytics DB Password +host = "localhost" # Analytics DB Host +port = 5432 # Analytics DB Port +dbname = "hyperswitch_db" # Name of Database +pool_size = 5 # Number of connections to keep open +connection_timeout = 10 # Timeout for database connection in seconds + # Config for KV setup [kv_config] # TTL for KV in seconds diff --git a/config/docker_compose.toml b/config/docker_compose.toml index 282894b56d43..ddda7e7021a4 100644 --- a/config/docker_compose.toml +++ b/config/docker_compose.toml @@ -319,5 +319,16 @@ supported_connectors = "braintree" redis_lock_expiry_seconds = 180 # 3 * 60 seconds delay_between_retries_in_milliseconds = 500 +[analytics] +source = "sqlx" + +[analytics.sqlx] +username = "db_user" +password = "db_pass" +host = "pg" +port = 5432 +dbname = "hyperswitch_db" +pool_size = 5 + [kv_config] ttl = 900 # 15 * 60 seconds diff --git a/crates/api_models/src/analytics.rs b/crates/api_models/src/analytics.rs new file mode 100644 index 000000000000..0358b6b313cf --- /dev/null +++ b/crates/api_models/src/analytics.rs @@ -0,0 +1,152 @@ +use std::collections::HashSet; + +use common_utils::events::ApiEventMetric; +use time::PrimitiveDateTime; + +use self::{ + payments::{PaymentDimensions, PaymentMetrics}, + refunds::{RefundDimensions, RefundMetrics}, +}; + +pub mod payments; +pub mod refunds; + +#[derive(Debug, serde::Serialize)] +pub struct NameDescription { + pub name: String, + pub desc: String, +} + +#[derive(Debug, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetInfoResponse { + pub metrics: Vec, + pub download_dimensions: Option>, + pub dimensions: Vec, +} + +impl ApiEventMetric for GetInfoResponse {} + +#[derive(Debug, Clone, Copy, serde::Serialize, 
serde::Deserialize, PartialEq, Eq, Hash)] +#[serde(rename_all = "camelCase")] +pub struct TimeRange { + #[serde(with = "common_utils::custom_serde::iso8601")] + pub start_time: PrimitiveDateTime, + #[serde(default, with = "common_utils::custom_serde::iso8601::option")] + pub end_time: Option, +} + +#[derive(Clone, Copy, Debug, serde::Deserialize, masking::Serialize)] +pub struct TimeSeries { + pub granularity: Granularity, +} + +#[derive(Clone, Copy, Debug, serde::Deserialize, masking::Serialize)] +pub enum Granularity { + #[serde(rename = "G_ONEMIN")] + OneMin, + #[serde(rename = "G_FIVEMIN")] + FiveMin, + #[serde(rename = "G_FIFTEENMIN")] + FifteenMin, + #[serde(rename = "G_THIRTYMIN")] + ThirtyMin, + #[serde(rename = "G_ONEHOUR")] + OneHour, + #[serde(rename = "G_ONEDAY")] + OneDay, +} + +#[derive(Clone, Debug, serde::Deserialize, masking::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetPaymentMetricRequest { + pub time_series: Option, + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, + #[serde(default)] + pub filters: payments::PaymentFilters, + pub metrics: HashSet, + #[serde(default)] + pub delta: bool, +} + +impl ApiEventMetric for GetPaymentMetricRequest {} + +#[derive(Clone, Debug, serde::Deserialize, masking::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetRefundMetricRequest { + pub time_series: Option, + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, + #[serde(default)] + pub filters: refunds::RefundFilters, + pub metrics: HashSet, + #[serde(default)] + pub delta: bool, +} + +impl ApiEventMetric for GetRefundMetricRequest {} + +#[derive(Debug, serde::Serialize)] +pub struct AnalyticsMetadata { + pub current_time_range: TimeRange, +} + +#[derive(Debug, serde::Deserialize, masking::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetPaymentFiltersRequest { + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, +} + +impl ApiEventMetric for GetPaymentFiltersRequest {} + +#[derive(Debug, Default, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PaymentFiltersResponse { + pub query_data: Vec, +} + +impl ApiEventMetric for PaymentFiltersResponse {} + +#[derive(Debug, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FilterValue { + pub dimension: PaymentDimensions, + pub values: Vec, +} + +#[derive(Debug, serde::Deserialize, masking::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetRefundFilterRequest { + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, +} + +impl ApiEventMetric for GetRefundFilterRequest {} + +#[derive(Debug, Default, serde::Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct RefundFiltersResponse { + pub query_data: Vec, +} + +impl ApiEventMetric for RefundFiltersResponse {} + +#[derive(Debug, serde::Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct RefundFilterValue { + pub dimension: RefundDimensions, + pub values: Vec, +} + +#[derive(Debug, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct MetricsResponse { + pub query_data: Vec, + pub meta_data: [AnalyticsMetadata; 1], +} diff --git a/crates/api_models/src/analytics/payments.rs b/crates/api_models/src/analytics/payments.rs new file mode 100644 index 000000000000..b5e5852d6283 --- /dev/null +++ b/crates/api_models/src/analytics/payments.rs @@ -0,0 +1,180 @@ +use std::{ + collections::hash_map::DefaultHasher, + hash::{Hash, Hasher}, +}; + +use 
common_enums::enums::{AttemptStatus, AuthenticationType, Currency, PaymentMethod}; +use common_utils::events::ApiEventMetric; + +use super::{NameDescription, TimeRange}; +use crate::{analytics::MetricsResponse, enums::Connector}; + +#[derive(Clone, Debug, Default, serde::Deserialize, masking::Serialize)] +pub struct PaymentFilters { + #[serde(default)] + pub currency: Vec, + #[serde(default)] + pub status: Vec, + #[serde(default)] + pub connector: Vec, + #[serde(default)] + pub auth_type: Vec, + #[serde(default)] + pub payment_method: Vec, +} + +#[derive( + Debug, + serde::Serialize, + serde::Deserialize, + strum::AsRefStr, + PartialEq, + PartialOrd, + Eq, + Ord, + strum::Display, + strum::EnumIter, + Clone, + Copy, +)] +#[serde(rename_all = "snake_case")] +#[strum(serialize_all = "snake_case")] +pub enum PaymentDimensions { + // Do not change the order of these enums + // Consult the Dashboard FE folks since these also affects the order of metrics on FE + Connector, + PaymentMethod, + Currency, + #[strum(serialize = "authentication_type")] + #[serde(rename = "authentication_type")] + AuthType, + #[strum(serialize = "status")] + #[serde(rename = "status")] + PaymentStatus, +} + +#[derive( + Clone, + Debug, + Hash, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumIter, + strum::AsRefStr, +)] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] +pub enum PaymentMetrics { + PaymentSuccessRate, + PaymentCount, + PaymentSuccessCount, + PaymentProcessedAmount, + AvgTicketSize, +} + +pub mod metric_behaviour { + pub struct PaymentSuccessRate; + pub struct PaymentCount; + pub struct PaymentSuccessCount; + pub struct PaymentProcessedAmount; + pub struct AvgTicketSize; +} + +impl From for NameDescription { + fn from(value: PaymentMetrics) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} + +impl From for NameDescription { + fn from(value: PaymentDimensions) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} + +#[derive(Debug, serde::Serialize, Eq)] +pub struct PaymentMetricsBucketIdentifier { + pub currency: Option, + pub status: Option, + pub connector: Option, + #[serde(rename = "authentication_type")] + pub auth_type: Option, + pub payment_method: Option, + #[serde(rename = "time_range")] + pub time_bucket: TimeRange, + // Coz FE sucks + #[serde(rename = "time_bucket")] + #[serde(with = "common_utils::custom_serde::iso8601custom")] + pub start_time: time::PrimitiveDateTime, +} + +impl PaymentMetricsBucketIdentifier { + pub fn new( + currency: Option, + status: Option, + connector: Option, + auth_type: Option, + payment_method: Option, + normalized_time_range: TimeRange, + ) -> Self { + Self { + currency, + status, + connector, + auth_type, + payment_method, + time_bucket: normalized_time_range, + start_time: normalized_time_range.start_time, + } + } +} + +impl Hash for PaymentMetricsBucketIdentifier { + fn hash(&self, state: &mut H) { + self.currency.hash(state); + self.status.map(|i| i.to_string()).hash(state); + self.connector.hash(state); + self.auth_type.map(|i| i.to_string()).hash(state); + self.payment_method.hash(state); + self.time_bucket.hash(state); + } +} + +impl PartialEq for PaymentMetricsBucketIdentifier { + fn eq(&self, other: &Self) -> bool { + let mut left = DefaultHasher::new(); + self.hash(&mut left); + let mut right = DefaultHasher::new(); + other.hash(&mut right); + left.finish() == right.finish() + } +} + +#[derive(Debug, serde::Serialize)] +pub 
struct PaymentMetricsBucketValue { + pub payment_success_rate: Option, + pub payment_count: Option, + pub payment_success_count: Option, + pub payment_processed_amount: Option, + pub avg_ticket_size: Option, +} + +#[derive(Debug, serde::Serialize)] +pub struct MetricsBucketResponse { + #[serde(flatten)] + pub values: PaymentMetricsBucketValue, + #[serde(flatten)] + pub dimensions: PaymentMetricsBucketIdentifier, +} + +impl ApiEventMetric for MetricsBucketResponse {} +impl ApiEventMetric for MetricsResponse {} diff --git a/crates/api_models/src/analytics/refunds.rs b/crates/api_models/src/analytics/refunds.rs new file mode 100644 index 000000000000..c5d444338d38 --- /dev/null +++ b/crates/api_models/src/analytics/refunds.rs @@ -0,0 +1,183 @@ +use std::{ + collections::hash_map::DefaultHasher, + hash::{Hash, Hasher}, +}; + +use common_enums::enums::{Currency, RefundStatus}; +use common_utils::events::ApiEventMetric; + +use crate::analytics::MetricsResponse; + +#[derive( + Clone, + Copy, + Debug, + Default, + Eq, + PartialEq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumString, +)] +// TODO RefundType common_enums need to mapped to storage_model +#[serde(rename_all = "snake_case")] +#[strum(serialize_all = "snake_case")] +pub enum RefundType { + InstantRefund, + #[default] + RegularRefund, + RetryRefund, +} + +use super::{NameDescription, TimeRange}; +#[derive(Clone, Debug, Default, serde::Deserialize, masking::Serialize)] +pub struct RefundFilters { + #[serde(default)] + pub currency: Vec, + #[serde(default)] + pub refund_status: Vec, + #[serde(default)] + pub connector: Vec, + #[serde(default)] + pub refund_type: Vec, +} + +#[derive( + Debug, + serde::Serialize, + serde::Deserialize, + strum::AsRefStr, + PartialEq, + PartialOrd, + Eq, + Ord, + strum::Display, + strum::EnumIter, + Clone, + Copy, +)] +#[serde(rename_all = "snake_case")] +#[strum(serialize_all = "snake_case")] +pub enum RefundDimensions { + Currency, + RefundStatus, + Connector, + RefundType, +} + +#[derive( + Clone, + Debug, + Hash, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumIter, + strum::AsRefStr, +)] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] +pub enum RefundMetrics { + RefundSuccessRate, + RefundCount, + RefundSuccessCount, + RefundProcessedAmount, +} + +pub mod metric_behaviour { + pub struct RefundSuccessRate; + pub struct RefundCount; + pub struct RefundSuccessCount; + pub struct RefundProcessedAmount; +} + +impl From for NameDescription { + fn from(value: RefundMetrics) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} + +impl From for NameDescription { + fn from(value: RefundDimensions) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} + +#[derive(Debug, serde::Serialize, Eq)] +pub struct RefundMetricsBucketIdentifier { + pub currency: Option, + pub refund_status: Option, + pub connector: Option, + pub refund_type: Option, + #[serde(rename = "time_range")] + pub time_bucket: TimeRange, + #[serde(rename = "time_bucket")] + #[serde(with = "common_utils::custom_serde::iso8601custom")] + pub start_time: time::PrimitiveDateTime, +} + +impl Hash for RefundMetricsBucketIdentifier { + fn hash(&self, state: &mut H) { + self.currency.hash(state); + self.refund_status.map(|i| i.to_string()).hash(state); + self.connector.hash(state); + self.refund_type.hash(state); + self.time_bucket.hash(state); + } +} +impl PartialEq for RefundMetricsBucketIdentifier { + fn 
eq(&self, other: &Self) -> bool { + let mut left = DefaultHasher::new(); + self.hash(&mut left); + let mut right = DefaultHasher::new(); + other.hash(&mut right); + left.finish() == right.finish() + } +} + +impl RefundMetricsBucketIdentifier { + pub fn new( + currency: Option, + refund_status: Option, + connector: Option, + refund_type: Option, + normalized_time_range: TimeRange, + ) -> Self { + Self { + currency, + refund_status, + connector, + refund_type, + time_bucket: normalized_time_range, + start_time: normalized_time_range.start_time, + } + } +} + +#[derive(Debug, serde::Serialize)] +pub struct RefundMetricsBucketValue { + pub refund_success_rate: Option, + pub refund_count: Option, + pub refund_success_count: Option, + pub refund_processed_amount: Option, +} + +#[derive(Debug, serde::Serialize)] +pub struct RefundMetricsBucketResponse { + #[serde(flatten)] + pub values: RefundMetricsBucketValue, + #[serde(flatten)] + pub dimensions: RefundMetricsBucketIdentifier, +} + +impl ApiEventMetric for RefundMetricsBucketResponse {} +impl ApiEventMetric for MetricsResponse {} diff --git a/crates/api_models/src/lib.rs b/crates/api_models/src/lib.rs index 5da916b14817..75509ed7386d 100644 --- a/crates/api_models/src/lib.rs +++ b/crates/api_models/src/lib.rs @@ -1,5 +1,6 @@ #![forbid(unsafe_code)] pub mod admin; +pub mod analytics; pub mod api_keys; pub mod bank_accounts; pub mod cards_info; diff --git a/crates/common_utils/src/custom_serde.rs b/crates/common_utils/src/custom_serde.rs index d64abe38e5b0..edbfa143a667 100644 --- a/crates/common_utils/src/custom_serde.rs +++ b/crates/common_utils/src/custom_serde.rs @@ -170,3 +170,51 @@ pub mod json_string { serde_json::from_str(&j).map_err(de::Error::custom) } } + +/// Use a custom ISO 8601 format when serializing and deserializing +/// [`PrimitiveDateTime`][PrimitiveDateTime]. +/// +/// [PrimitiveDateTime]: ::time::PrimitiveDateTime +pub mod iso8601custom { + + use serde::{ser::Error as _, Deserializer, Serialize, Serializer}; + use time::{ + format_description::well_known::{ + iso8601::{Config, EncodedConfig, TimePrecision}, + Iso8601, + }, + serde::iso8601, + PrimitiveDateTime, UtcOffset, + }; + + const FORMAT_CONFIG: EncodedConfig = Config::DEFAULT + .set_time_precision(TimePrecision::Second { + decimal_digits: None, + }) + .encode(); + + /// Serialize a [`PrimitiveDateTime`] using the well-known ISO 8601 format. + pub fn serialize(date_time: &PrimitiveDateTime, serializer: S) -> Result + where + S: Serializer, + { + date_time + .assume_utc() + .format(&Iso8601::) + .map_err(S::Error::custom)? + .replace('T', " ") + .replace('Z', "") + .serialize(serializer) + } + + /// Deserialize an [`PrimitiveDateTime`] from its ISO 8601 representation. 
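+/// As a hypothetical example of the asymmetry: the ISO 8601 input
+/// `"2023-11-10T11:47:32Z"` deserializes to the equivalent UTC value, while
+/// `serialize` above renders that value back as `2023-11-10 11:47:32` (second
+/// precision, `T` replaced by a space, trailing `Z` stripped).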
+ pub fn deserialize<'a, D>(deserializer: D) -> Result + where + D: Deserializer<'a>, + { + iso8601::deserialize(deserializer).map(|offset_date_time| { + let utc_date_time = offset_date_time.to_offset(UtcOffset::UTC); + PrimitiveDateTime::new(utc_date_time.date(), utc_date_time.time()) + }) + } +} diff --git a/crates/router/Cargo.toml b/crates/router/Cargo.toml index 9ab955813336..7456944a8e4e 100644 --- a/crates/router/Cargo.toml +++ b/crates/router/Cargo.toml @@ -15,7 +15,7 @@ kms = ["external_services/kms", "dep:aws-config"] email = ["external_services/email", "dep:aws-config"] basilisk = ["kms"] stripe = ["dep:serde_qs"] -release = ["kms", "stripe", "basilisk", "s3", "email", "business_profile_routing", "accounts_cache", "kv_store"] +release = ["kms", "stripe", "basilisk", "s3", "email", "business_profile_routing", "accounts_cache", "kv_store", "olap"] olap = ["data_models/olap", "storage_impl/olap", "scheduler/olap"] oltp = ["data_models/oltp", "storage_impl/oltp"] kv_store = ["scheduler/kv_store"] @@ -44,6 +44,7 @@ aws-config = { version = "0.55.3", optional = true } aws-sdk-s3 = { version = "0.28.0", optional = true } base64 = "0.21.2" bb8 = "0.8" +bigdecimal = "0.3.1" blake3 = "1.3.3" bytes = "1.4.0" clap = { version = "4.3.2", default-features = false, features = ["std", "derive", "help", "usage"] } @@ -83,6 +84,7 @@ serde_urlencoded = "0.7.1" serde_with = "3.0.0" signal-hook = "0.3.15" strum = { version = "0.24.1", features = ["derive"] } +sqlx = { version = "0.6.3", features = ["postgres", "runtime-actix", "runtime-actix-native-tls", "time", "bigdecimal"] } thiserror = "1.0.40" time = { version = "0.3.21", features = ["serde", "serde-well-known", "std"] } tokio = { version = "1.28.2", features = ["macros", "rt-multi-thread"] } @@ -100,6 +102,7 @@ digest = "0.9" api_models = { version = "0.1.0", path = "../api_models", features = ["errors"] } cards = { version = "0.1.0", path = "../cards" } common_utils = { version = "0.1.0", path = "../common_utils", features = ["signals", "async_ext", "logs"] } +common_enums = { version = "0.1.0", path = "../common_enums"} external_services = { version = "0.1.0", path = "../external_services" } euclid = { version = "0.1.0", path = "../euclid", features = ["valued_jit"] } masking = { version = "0.1.0", path = "../masking" } diff --git a/crates/router/src/analytics.rs b/crates/router/src/analytics.rs new file mode 100644 index 000000000000..d57403d92989 --- /dev/null +++ b/crates/router/src/analytics.rs @@ -0,0 +1,129 @@ +mod core; +mod errors; +pub mod metrics; +mod payments; +mod query; +mod refunds; +pub mod routes; + +mod sqlx; +mod types; +mod utils; + +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, + refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use router_env::{instrument, tracing}; + +use self::{ + payments::metrics::{PaymentMetric, PaymentMetricRow}, + refunds::metrics::{RefundMetric, RefundMetricRow}, + sqlx::SqlxClient, +}; +use crate::configs::settings::Database; + +#[derive(Clone, Debug)] +pub enum AnalyticsProvider { + Sqlx(SqlxClient), +} + +impl Default for AnalyticsProvider { + fn default() -> Self { + Self::Sqlx(SqlxClient::default()) + } +} + +impl AnalyticsProvider { + #[instrument(skip_all)] + pub async fn get_payment_metrics( + &self, + metric: &PaymentMetrics, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + 
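+ // With no granularity, the whole requested range becomes a single bucket per dimension combination.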
time_range: &TimeRange, + ) -> types::MetricsResult> { + // Metrics to get the fetch time for each payment metric + metrics::request::record_operation_time( + async { + match self { + Self::Sqlx(pool) => { + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + }, + &metrics::METRIC_FETCH_TIME, + metric, + self, + ) + .await + } + + pub async fn get_refund_metrics( + &self, + metric: &RefundMetrics, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + match self { + Self::Sqlx(pool) => { + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + } + + pub async fn from_conf( + config: &AnalyticsConfig, + #[cfg(feature = "kms")] kms_client: &external_services::kms::KmsClient, + ) -> Self { + match config { + AnalyticsConfig::Sqlx { sqlx } => Self::Sqlx( + SqlxClient::from_conf( + sqlx, + #[cfg(feature = "kms")] + kms_client, + ) + .await, + ), + } + } +} + +#[derive(Clone, Debug, serde::Deserialize)] +#[serde(tag = "source")] +#[serde(rename_all = "lowercase")] +pub enum AnalyticsConfig { + Sqlx { sqlx: Database }, +} + +impl Default for AnalyticsConfig { + fn default() -> Self { + Self::Sqlx { + sqlx: Database::default(), + } + } +} diff --git a/crates/router/src/analytics/core.rs b/crates/router/src/analytics/core.rs new file mode 100644 index 000000000000..bf124a6c0e85 --- /dev/null +++ b/crates/router/src/analytics/core.rs @@ -0,0 +1,96 @@ +use api_models::analytics::{ + payments::PaymentDimensions, refunds::RefundDimensions, FilterValue, GetInfoResponse, + GetPaymentFiltersRequest, GetRefundFilterRequest, PaymentFiltersResponse, RefundFilterValue, + RefundFiltersResponse, +}; +use error_stack::ResultExt; + +use super::{ + errors::{self, AnalyticsError}, + payments::filters::{get_payment_filter_for_dimension, FilterRow}, + refunds::filters::{get_refund_filter_for_dimension, RefundFilterRow}, + types::AnalyticsDomain, + utils, AnalyticsProvider, +}; +use crate::{services::ApplicationResponse, types::domain}; + +pub type AnalyticsApiResponse = errors::AnalyticsResult>; + +pub async fn get_domain_info(domain: AnalyticsDomain) -> AnalyticsApiResponse { + let info = match domain { + AnalyticsDomain::Payments => GetInfoResponse { + metrics: utils::get_payment_metrics_info(), + download_dimensions: None, + dimensions: utils::get_payment_dimensions(), + }, + AnalyticsDomain::Refunds => GetInfoResponse { + metrics: utils::get_refund_metrics_info(), + download_dimensions: None, + dimensions: utils::get_refund_dimensions(), + }, + }; + Ok(ApplicationResponse::Json(info)) +} + +pub async fn payment_filters_core( + pool: AnalyticsProvider, + req: GetPaymentFiltersRequest, + merchant: domain::MerchantAccount, +) -> AnalyticsApiResponse { + let mut res = PaymentFiltersResponse::default(); + + for dim in req.group_by_names { + let values = match pool.clone() { + AnalyticsProvider::Sqlx(pool) => { + get_payment_filter_for_dimension(dim, &merchant.merchant_id, &req.time_range, &pool) + .await + } + } + .change_context(AnalyticsError::UnknownError)? 
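+ // Keep only rows that have a value for the requested dimension, rendered as plain strings for the response.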
+ .into_iter() + .filter_map(|fil: FilterRow| match dim { + PaymentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), + PaymentDimensions::PaymentStatus => fil.status.map(|i| i.as_ref().to_string()), + PaymentDimensions::Connector => fil.connector, + PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()), + PaymentDimensions::PaymentMethod => fil.payment_method, + }) + .collect::>(); + res.query_data.push(FilterValue { + dimension: dim, + values, + }) + } + + Ok(ApplicationResponse::Json(res)) +} + +pub async fn refund_filter_core( + pool: AnalyticsProvider, + req: GetRefundFilterRequest, + merchant: domain::MerchantAccount, +) -> AnalyticsApiResponse { + let mut res = RefundFiltersResponse::default(); + for dim in req.group_by_names { + let values = match pool.clone() { + AnalyticsProvider::Sqlx(pool) => { + get_refund_filter_for_dimension(dim, &merchant.merchant_id, &req.time_range, &pool) + .await + } + } + .change_context(AnalyticsError::UnknownError)? + .into_iter() + .filter_map(|fil: RefundFilterRow| match dim { + RefundDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), + RefundDimensions::RefundStatus => fil.refund_status.map(|i| i.as_ref().to_string()), + RefundDimensions::Connector => fil.connector, + RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()), + }) + .collect::>(); + res.query_data.push(RefundFilterValue { + dimension: dim, + values, + }) + } + Ok(ApplicationResponse::Json(res)) +} diff --git a/crates/router/src/analytics/errors.rs b/crates/router/src/analytics/errors.rs new file mode 100644 index 000000000000..da0b2f239cd7 --- /dev/null +++ b/crates/router/src/analytics/errors.rs @@ -0,0 +1,32 @@ +use api_models::errors::types::{ApiError, ApiErrorResponse}; +use common_utils::errors::{CustomResult, ErrorSwitch}; + +pub type AnalyticsResult = CustomResult; + +#[derive(Debug, Clone, serde::Serialize, thiserror::Error)] +pub enum AnalyticsError { + #[allow(dead_code)] + #[error("Not implemented: {0}")] + NotImplemented(&'static str), + #[error("Unknown Analytics Error")] + UnknownError, +} + +impl ErrorSwitch for AnalyticsError { + fn switch(&self) -> ApiErrorResponse { + match self { + Self::NotImplemented(feature) => ApiErrorResponse::NotImplemented(ApiError::new( + "IR", + 0, + format!("{feature} is not implemented."), + None, + )), + Self::UnknownError => ApiErrorResponse::InternalServerError(ApiError::new( + "HE", + 0, + "Something went wrong", + None, + )), + } + } +} diff --git a/crates/router/src/analytics/metrics.rs b/crates/router/src/analytics/metrics.rs new file mode 100644 index 000000000000..6222315a8c06 --- /dev/null +++ b/crates/router/src/analytics/metrics.rs @@ -0,0 +1,9 @@ +use router_env::{global_meter, histogram_metric, histogram_metric_u64, metrics_context}; + +metrics_context!(CONTEXT); +global_meter!(GLOBAL_METER, "ROUTER_API"); + +histogram_metric!(METRIC_FETCH_TIME, GLOBAL_METER); +histogram_metric_u64!(BUCKETS_FETCHED, GLOBAL_METER); + +pub mod request; diff --git a/crates/router/src/analytics/metrics/request.rs b/crates/router/src/analytics/metrics/request.rs new file mode 100644 index 000000000000..b7c202f2db25 --- /dev/null +++ b/crates/router/src/analytics/metrics/request.rs @@ -0,0 +1,60 @@ +pub fn add_attributes>( + key: &'static str, + value: T, +) -> router_env::opentelemetry::KeyValue { + router_env::opentelemetry::KeyValue::new(key, value) +} + +#[inline] +pub async fn record_operation_time( + future: F, + metric: &once_cell::sync::Lazy>, 
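+ // Histogram recording the metric query's wall-clock time in seconds (METRIC_FETCH_TIME at the call site).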
+ metric_name: &api_models::analytics::payments::PaymentMetrics, + source: &crate::analytics::AnalyticsProvider, +) -> R +where + F: futures::Future, +{ + let (result, time) = time_future(future).await; + let attributes = &[ + add_attributes("metric_name", metric_name.to_string()), + add_attributes( + "source", + match source { + crate::analytics::AnalyticsProvider::Sqlx(_) => "Sqlx", + }, + ), + ]; + let value = time.as_secs_f64(); + metric.record(&super::CONTEXT, value, attributes); + + router_env::logger::debug!("Attributes: {:?}, Time: {}", attributes, value); + result +} + +use std::time; + +#[inline] +pub async fn time_future(future: F) -> (R, time::Duration) +where + F: futures::Future, +{ + let start = time::Instant::now(); + let result = future.await; + let time_spent = start.elapsed(); + (result, time_spent) +} + +#[macro_export] +macro_rules! histogram_metric { + ($name:ident, $meter:ident) => { + pub(crate) static $name: once_cell::sync::Lazy< + $crate::opentelemetry::metrics::Histogram, + > = once_cell::sync::Lazy::new(|| $meter.u64_histogram(stringify!($name)).init()); + }; + ($name:ident, $meter:ident, $description:literal) => { + pub(crate) static $name: once_cell::sync::Lazy< + $crate::opentelemetry::metrics::Histogram, + > = once_cell::sync::Lazy::new(|| $meter.u64_histogram($description).init()); + }; +} diff --git a/crates/router/src/analytics/payments.rs b/crates/router/src/analytics/payments.rs new file mode 100644 index 000000000000..527bf75a3c72 --- /dev/null +++ b/crates/router/src/analytics/payments.rs @@ -0,0 +1,13 @@ +pub mod accumulator; +mod core; +pub mod filters; +pub mod metrics; +pub mod types; +pub use accumulator::{PaymentMetricAccumulator, PaymentMetricsAccumulator}; + +pub trait PaymentAnalytics: + metrics::PaymentMetricAnalytics + filters::PaymentFilterAnalytics +{ +} + +pub use self::core::get_metrics; diff --git a/crates/router/src/analytics/payments/accumulator.rs b/crates/router/src/analytics/payments/accumulator.rs new file mode 100644 index 000000000000..5eebd0974693 --- /dev/null +++ b/crates/router/src/analytics/payments/accumulator.rs @@ -0,0 +1,150 @@ +use api_models::analytics::payments::PaymentMetricsBucketValue; +use common_enums::enums as storage_enums; +use router_env::logger; + +use super::metrics::PaymentMetricRow; + +#[derive(Debug, Default)] +pub struct PaymentMetricsAccumulator { + pub payment_success_rate: SuccessRateAccumulator, + pub payment_count: CountAccumulator, + pub payment_success: CountAccumulator, + pub processed_amount: SumAccumulator, + pub avg_ticket_size: AverageAccumulator, +} + +#[derive(Debug, Default)] +pub struct SuccessRateAccumulator { + pub success: i64, + pub total: i64, +} + +#[derive(Debug, Default)] +#[repr(transparent)] +pub struct CountAccumulator { + pub count: Option, +} + +#[derive(Debug, Default)] +#[repr(transparent)] +pub struct SumAccumulator { + pub total: Option, +} + +#[derive(Debug, Default)] +pub struct AverageAccumulator { + pub total: u32, + pub count: u32, +} + +pub trait PaymentMetricAccumulator { + type MetricOutput; + + fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow); + + fn collect(self) -> Self::MetricOutput; +} + +impl PaymentMetricAccumulator for SuccessRateAccumulator { + type MetricOutput = Option; + + fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { + if let Some(ref status) = metrics.status { + if status.as_ref() == &storage_enums::AttemptStatus::Charged { + self.success += metrics.count.unwrap_or_default(); + } + }; + self.total += 
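+ // Every bucket row feeds the denominator; only Charged attempts fed the numerator above.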
metrics.count.unwrap_or_default(); + } + + fn collect(self) -> Self::MetricOutput { + if self.total <= 0 { + None + } else { + Some( + f64::from(u32::try_from(self.success).ok()?) * 100.0 + / f64::from(u32::try_from(self.total).ok()?), + ) + } + } +} + +impl PaymentMetricAccumulator for CountAccumulator { + type MetricOutput = Option; + #[inline] + fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { + self.count = match (self.count, metrics.count) { + (None, None) => None, + (None, i @ Some(_)) | (i @ Some(_), None) => i, + (Some(a), Some(b)) => Some(a + b), + } + } + #[inline] + fn collect(self) -> Self::MetricOutput { + self.count.and_then(|i| u64::try_from(i).ok()) + } +} + +impl PaymentMetricAccumulator for SumAccumulator { + type MetricOutput = Option; + #[inline] + fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { + self.total = match ( + self.total, + metrics + .total + .as_ref() + .and_then(bigdecimal::ToPrimitive::to_i64), + ) { + (None, None) => None, + (None, i @ Some(_)) | (i @ Some(_), None) => i, + (Some(a), Some(b)) => Some(a + b), + } + } + #[inline] + fn collect(self) -> Self::MetricOutput { + u64::try_from(self.total.unwrap_or(0)).ok() + } +} + +impl PaymentMetricAccumulator for AverageAccumulator { + type MetricOutput = Option; + + fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) { + let total = metrics + .total + .as_ref() + .and_then(bigdecimal::ToPrimitive::to_u32); + let count = metrics.count.and_then(|total| u32::try_from(total).ok()); + + match (total, count) { + (Some(total), Some(count)) => { + self.total += total; + self.count += count; + } + _ => { + logger::error!(message="Dropping metrics for average accumulator", metric=?metrics); + } + } + } + + fn collect(self) -> Self::MetricOutput { + if self.count == 0 { + None + } else { + Some(f64::from(self.total) / f64::from(self.count)) + } + } +} + +impl PaymentMetricsAccumulator { + pub fn collect(self) -> PaymentMetricsBucketValue { + PaymentMetricsBucketValue { + payment_success_rate: self.payment_success_rate.collect(), + payment_count: self.payment_count.collect(), + payment_success_count: self.payment_success.collect(), + payment_processed_amount: self.processed_amount.collect(), + avg_ticket_size: self.avg_ticket_size.collect(), + } + } +} diff --git a/crates/router/src/analytics/payments/core.rs b/crates/router/src/analytics/payments/core.rs new file mode 100644 index 000000000000..23eca8879a70 --- /dev/null +++ b/crates/router/src/analytics/payments/core.rs @@ -0,0 +1,129 @@ +use std::collections::HashMap; + +use api_models::analytics::{ + payments::{MetricsBucketResponse, PaymentMetrics, PaymentMetricsBucketIdentifier}, + AnalyticsMetadata, GetPaymentMetricRequest, MetricsResponse, +}; +use error_stack::{IntoReport, ResultExt}; +use router_env::{ + instrument, logger, + tracing::{self, Instrument}, +}; + +use super::PaymentMetricsAccumulator; +use crate::{ + analytics::{ + core::AnalyticsApiResponse, errors::AnalyticsError, metrics, + payments::PaymentMetricAccumulator, AnalyticsProvider, + }, + services::ApplicationResponse, + types::domain, +}; + +#[instrument(skip_all)] +pub async fn get_metrics( + pool: AnalyticsProvider, + merchant_account: domain::MerchantAccount, + req: GetPaymentMetricRequest, +) -> AnalyticsApiResponse> { + let mut metrics_accumulator: HashMap< + PaymentMetricsBucketIdentifier, + PaymentMetricsAccumulator, + > = HashMap::new(); + + let mut set = tokio::task::JoinSet::new(); + for metric_type in req.metrics.iter().cloned() { + let req = 
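+ // One clone per spawned task, so each metric query owns its own copy of the request.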
req.clone(); + let merchant_id = merchant_account.merchant_id.clone(); + let pool = pool.clone(); + let task_span = tracing::debug_span!( + "analytics_payments_query", + payment_metric = metric_type.as_ref() + ); + set.spawn( + async move { + let data = pool + .get_payment_metrics( + &metric_type, + &req.group_by_names.clone(), + &merchant_id, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + (metric_type, data) + } + .instrument(task_span), + ); + } + + while let Some((metric, data)) = set + .join_next() + .await + .transpose() + .into_report() + .change_context(AnalyticsError::UnknownError)? + { + let data = data?; + let attributes = &[ + metrics::request::add_attributes("metric_type", metric.to_string()), + metrics::request::add_attributes( + "source", + match pool { + crate::analytics::AnalyticsProvider::Sqlx(_) => "Sqlx", + }, + ), + ]; + + let value = u64::try_from(data.len()); + if let Ok(val) = value { + metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); + } + + for (id, value) in data { + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + PaymentMetrics::PaymentSuccessRate => metrics_builder + .payment_success_rate + .add_metrics_bucket(&value), + PaymentMetrics::PaymentCount => { + metrics_builder.payment_count.add_metrics_bucket(&value) + } + PaymentMetrics::PaymentSuccessCount => { + metrics_builder.payment_success.add_metrics_bucket(&value) + } + PaymentMetrics::PaymentProcessedAmount => { + metrics_builder.processed_amount.add_metrics_bucket(&value) + } + PaymentMetrics::AvgTicketSize => { + metrics_builder.avg_ticket_size.add_metrics_bucket(&value) + } + } + } + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + + let query_data: Vec = metrics_accumulator + .into_iter() + .map(|(id, val)| MetricsBucketResponse { + values: val.collect(), + dimensions: id, + }) + .collect(); + + Ok(ApplicationResponse::Json(MetricsResponse { + query_data, + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + })) +} diff --git a/crates/router/src/analytics/payments/filters.rs b/crates/router/src/analytics/payments/filters.rs new file mode 100644 index 000000000000..f009aaa76329 --- /dev/null +++ b/crates/router/src/analytics/payments/filters.rs @@ -0,0 +1,58 @@ +use api_models::analytics::{payments::PaymentDimensions, Granularity, TimeRange}; +use common_enums::enums::{AttemptStatus, AuthenticationType, Currency}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql}, + types::{ + AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult, + LoadRow, + }, +}; + +pub trait PaymentFilterAnalytics: LoadRow {} + +pub async fn get_payment_filter_for_dimension( + dimension: PaymentDimensions, + merchant: &String, + time_range: &TimeRange, + pool: &T, +) -> FiltersResult> +where + T: AnalyticsDataSource + PaymentFilterAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + let mut query_builder: QueryBuilder = 
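+ // Distinct values are read from the same Payment collection the metric queries run against.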
QueryBuilder::new(AnalyticsCollection::Payment); + + query_builder.add_select_column(dimension).switch()?; + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant) + .switch()?; + + query_builder.set_distinct(); + + query_builder + .execute_query::(pool) + .await + .change_context(FiltersError::QueryBuildingError)? + .change_context(FiltersError::QueryExecutionFailure) +} + +#[derive(Debug, serde::Serialize, Eq, PartialEq)] +pub struct FilterRow { + pub currency: Option>, + pub status: Option>, + pub connector: Option, + pub authentication_type: Option>, + pub payment_method: Option, +} diff --git a/crates/router/src/analytics/payments/metrics.rs b/crates/router/src/analytics/payments/metrics.rs new file mode 100644 index 000000000000..f492e5bd4df9 --- /dev/null +++ b/crates/router/src/analytics/payments/metrics.rs @@ -0,0 +1,137 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_enums::enums as storage_enums; +use time::PrimitiveDateTime; + +use crate::analytics::{ + query::{Aggregate, GroupByClause, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, +}; + +mod avg_ticket_size; +mod payment_count; +mod payment_processed_amount; +mod payment_success_count; +mod success_rate; + +use avg_ticket_size::AvgTicketSize; +use payment_count::PaymentCount; +use payment_processed_amount::PaymentProcessedAmount; +use payment_success_count::PaymentSuccessCount; +use success_rate::PaymentSuccessRate; + +#[derive(Debug, PartialEq, Eq)] +pub struct PaymentMetricRow { + pub currency: Option>, + pub status: Option>, + pub connector: Option, + pub authentication_type: Option>, + pub payment_method: Option, + pub total: Option, + pub count: Option, + pub start_bucket: Option, + pub end_bucket: Option, +} + +pub trait PaymentMetricAnalytics: LoadRow {} + +#[async_trait::async_trait] +pub trait PaymentMetric +where + T: AnalyticsDataSource + PaymentMetricAnalytics, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult>; +} + +#[async_trait::async_trait] +impl PaymentMetric for PaymentMetrics +where + T: AnalyticsDataSource + PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + match self { + Self::PaymentSuccessRate => { + PaymentSuccessRate + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::PaymentCount => { + PaymentCount + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::PaymentSuccessCount => { + PaymentSuccessCount + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::PaymentProcessedAmount => { + PaymentProcessedAmount + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::AvgTicketSize => { + AvgTicketSize + 
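+ // Average ticket size = SUM(amount) / COUNT over the matching attempts, divided later in the accumulator.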
.load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + } +} diff --git a/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs b/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs new file mode 100644 index 000000000000..2230d870e68a --- /dev/null +++ b/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs @@ -0,0 +1,126 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::{PaymentMetric, PaymentMetricRow}; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct AvgTicketSize; + +#[async_trait::async_trait] +impl PaymentMetric for AvgTicketSize +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Sum { + field: "amount", + alias: Some("total"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
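+ // Fold raw rows into (bucket identifier, row) pairs, clipping each bucket's window to the granularity when one was requested.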
+ .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + i.status.as_ref().map(|i| i.0), + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::analytics::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/metrics/payment_count.rs b/crates/router/src/analytics/payments/metrics/payment_count.rs new file mode 100644 index 000000000000..661cec3dac36 --- /dev/null +++ b/crates/router/src/analytics/payments/metrics/payment_count.rs @@ -0,0 +1,117 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::PaymentMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentCount; + +#[async_trait::async_trait] +impl super::PaymentMetric for PaymentCount +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
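+ // Same row-to-bucket folding as the other metrics: the identifier carries the dimension values plus the clipped time window.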
+ .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + i.status.as_ref().map(|i| i.0), + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, crate::analytics::query::PostProcessingError>>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs b/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs new file mode 100644 index 000000000000..2ec0c6f18f9c --- /dev/null +++ b/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs @@ -0,0 +1,128 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_enums::enums as storage_enums; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::PaymentMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentProcessedAmount; + +#[async_trait::async_trait] +impl super::PaymentMetric for PaymentProcessedAmount +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Sum { + field: "amount", + alias: Some("total"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .add_filter_clause( + PaymentDimensions::PaymentStatus, + storage_enums::AttemptStatus::Charged, + ) + .switch()?; + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
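+ // Status was pinned to Charged by the filter above, so the bucket identifier carries None for the status dimension.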
+ .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::analytics::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/metrics/payment_success_count.rs b/crates/router/src/analytics/payments/metrics/payment_success_count.rs new file mode 100644 index 000000000000..8245fe7aeb88 --- /dev/null +++ b/crates/router/src/analytics/payments/metrics/payment_success_count.rs @@ -0,0 +1,127 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_enums::enums as storage_enums; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::PaymentMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentSuccessCount; + +#[async_trait::async_trait] +impl super::PaymentMetric for PaymentSuccessCount +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .add_filter_clause( + PaymentDimensions::PaymentStatus, + storage_enums::AttemptStatus::Charged, + ) + .switch()?; + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::analytics::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/metrics/success_rate.rs b/crates/router/src/analytics/payments/metrics/success_rate.rs new file mode 100644 index 000000000000..c63956d4b157 --- /dev/null +++ b/crates/router/src/analytics/payments/metrics/success_rate.rs @@ -0,0 +1,123 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::PaymentMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentSuccessRate; + +#[async_trait::async_trait] +impl super::PaymentMetric for PaymentSuccessRate +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); + let mut dimensions = dimensions.to_vec(); + + dimensions.push(PaymentDimensions::PaymentStatus); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
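+        // Status was pushed in as an extra group-by dimension, but it is erased (None) in the
+        // identifier so Charged and non-Charged rows land in the same bucket for the rate.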
+ .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::analytics::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/types.rs b/crates/router/src/analytics/payments/types.rs new file mode 100644 index 000000000000..fdfbedef383d --- /dev/null +++ b/crates/router/src/analytics/payments/types.rs @@ -0,0 +1,46 @@ +use api_models::analytics::payments::{PaymentDimensions, PaymentFilters}; +use error_stack::ResultExt; + +use crate::analytics::{ + query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource}, +}; + +impl QueryFilter for PaymentFilters +where + T: AnalyticsDataSource, + AnalyticsCollection: ToSql, +{ + fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { + if !self.currency.is_empty() { + builder + .add_filter_in_range_clause(PaymentDimensions::Currency, &self.currency) + .attach_printable("Error adding currency filter")?; + } + + if !self.status.is_empty() { + builder + .add_filter_in_range_clause(PaymentDimensions::PaymentStatus, &self.status) + .attach_printable("Error adding payment status filter")?; + } + + if !self.connector.is_empty() { + builder + .add_filter_in_range_clause(PaymentDimensions::Connector, &self.connector) + .attach_printable("Error adding connector filter")?; + } + + if !self.auth_type.is_empty() { + builder + .add_filter_in_range_clause(PaymentDimensions::AuthType, &self.auth_type) + .attach_printable("Error adding auth type filter")?; + } + + if !self.payment_method.is_empty() { + builder + .add_filter_in_range_clause(PaymentDimensions::PaymentMethod, &self.payment_method) + .attach_printable("Error adding payment method filter")?; + } + Ok(()) + } +} diff --git a/crates/router/src/analytics/query.rs b/crates/router/src/analytics/query.rs new file mode 100644 index 000000000000..b1f621d8153d --- /dev/null +++ b/crates/router/src/analytics/query.rs @@ -0,0 +1,533 @@ +#![allow(dead_code)] +use std::marker::PhantomData; + +use api_models::{ + analytics::{ + self as analytics_api, + payments::PaymentDimensions, + refunds::{RefundDimensions, RefundType}, + Granularity, + }, + enums::Connector, + refunds::RefundStatus, +}; +use common_enums::{ + enums as storage_enums, + enums::{AttemptStatus, AuthenticationType, Currency, PaymentMethod}, +}; +use common_utils::errors::{CustomResult, ParsingError}; +use error_stack::{IntoReport, ResultExt}; +use router_env::logger; + +use super::types::{AnalyticsCollection, AnalyticsDataSource, LoadRow}; +use crate::analytics::types::QueryExecutionError; +pub type QueryResult = error_stack::Result; +pub trait QueryFilter +where + T: AnalyticsDataSource, + AnalyticsCollection: ToSql, +{ + fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()>; +} + +pub trait GroupByClause +where + T: AnalyticsDataSource, + AnalyticsCollection: ToSql, +{ + fn set_group_by_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()>; +} + +pub trait SeriesBucket { + type 
SeriesType; + type GranularityLevel; + + fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel; + + fn get_bucket_size(&self) -> u8; + + fn clip_to_start( + &self, + value: Self::SeriesType, + ) -> error_stack::Result; + + fn clip_to_end( + &self, + value: Self::SeriesType, + ) -> error_stack::Result; +} + +impl QueryFilter for analytics_api::TimeRange +where + T: AnalyticsDataSource, + time::PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, +{ + fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { + builder.add_custom_filter_clause("created_at", self.start_time, FilterTypes::Gte)?; + if let Some(end) = self.end_time { + builder.add_custom_filter_clause("created_at", end, FilterTypes::Lte)?; + } + Ok(()) + } +} + +impl GroupByClause for Granularity { + fn set_group_by_clause( + &self, + builder: &mut QueryBuilder, + ) -> QueryResult<()> { + let trunc_scale = self.get_lowest_common_granularity_level(); + + let granularity_bucket_scale = match self { + Self::OneMin => None, + Self::FiveMin | Self::FifteenMin | Self::ThirtyMin => Some("minute"), + Self::OneHour | Self::OneDay => None, + }; + + let granularity_divisor = self.get_bucket_size(); + + builder + .add_group_by_clause(format!("DATE_TRUNC('{trunc_scale}', modified_at)")) + .attach_printable("Error adding time prune group by")?; + if let Some(scale) = granularity_bucket_scale { + builder + .add_group_by_clause(format!( + "FLOOR(DATE_PART('{scale}', modified_at)/{granularity_divisor})" + )) + .attach_printable("Error adding time binning group by")?; + } + Ok(()) + } +} + +#[derive(strum::Display)] +#[strum(serialize_all = "lowercase")] +pub enum TimeGranularityLevel { + Minute, + Hour, + Day, +} + +impl SeriesBucket for Granularity { + type SeriesType = time::PrimitiveDateTime; + + type GranularityLevel = TimeGranularityLevel; + + fn get_lowest_common_granularity_level(&self) -> Self::GranularityLevel { + match self { + Self::OneMin => TimeGranularityLevel::Minute, + Self::FiveMin | Self::FifteenMin | Self::ThirtyMin | Self::OneHour => { + TimeGranularityLevel::Hour + } + Self::OneDay => TimeGranularityLevel::Day, + } + } + + fn get_bucket_size(&self) -> u8 { + match self { + Self::OneMin => 60, + Self::FiveMin => 5, + Self::FifteenMin => 15, + Self::ThirtyMin => 30, + Self::OneHour => 60, + Self::OneDay => 24, + } + } + + fn clip_to_start( + &self, + value: Self::SeriesType, + ) -> error_stack::Result { + let clip_start = |value: u8, modulo: u8| -> u8 { value - value % modulo }; + + let clipped_time = match ( + self.get_lowest_common_granularity_level(), + self.get_bucket_size(), + ) { + (TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT + .replace_second(clip_start(value.second(), i)) + .and_then(|t| t.replace_minute(value.minute())) + .and_then(|t| t.replace_hour(value.hour())), + (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT + .replace_minute(clip_start(value.minute(), i)) + .and_then(|t| t.replace_hour(value.hour())), + (TimeGranularityLevel::Day, i) => { + time::Time::MIDNIGHT.replace_hour(clip_start(value.hour(), i)) + } + } + .into_report() + .change_context(PostProcessingError::BucketClipping)?; + + Ok(value.replace_time(clipped_time)) + } + + fn clip_to_end( + &self, + value: Self::SeriesType, + ) -> error_stack::Result { + let clip_end = |value: u8, modulo: u8| -> u8 { value + modulo - 1 - value % modulo }; + + let clipped_time = match ( + self.get_lowest_common_granularity_level(), + self.get_bucket_size(), + ) { + 
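+            // Round up to the last instant of the bucket; the mirror of clip_to_start above.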
(TimeGranularityLevel::Minute, i) => time::Time::MIDNIGHT
+                .replace_second(clip_end(value.second(), i))
+                .and_then(|t| t.replace_minute(value.minute()))
+                .and_then(|t| t.replace_hour(value.hour())),
+            (TimeGranularityLevel::Hour, i) => time::Time::MIDNIGHT
+                .replace_minute(clip_end(value.minute(), i))
+                .and_then(|t| t.replace_hour(value.hour())),
+            (TimeGranularityLevel::Day, i) => {
+                time::Time::MIDNIGHT.replace_hour(clip_end(value.hour(), i))
+            }
+        }
+        .into_report()
+        .change_context(PostProcessingError::BucketClipping)
+        .attach_printable_lazy(|| format!("Bucket Clip Error: {value}"))?;
+
+        Ok(value.replace_time(clipped_time))
+    }
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum QueryBuildingError {
+    #[allow(dead_code)]
+    #[error("Not Implemented: {0}")]
+    NotImplemented(String),
+    #[error("Failed to Serialize to SQL")]
+    SqlSerializeError,
+    #[error("Failed to build sql query: {0}")]
+    InvalidQuery(&'static str),
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum PostProcessingError {
+    #[error("Error Clipping values to bucket sizes")]
+    BucketClipping,
+}
+
+#[derive(Debug)]
+pub enum Aggregate<R> {
+    Count {
+        field: Option<R>,
+        alias: Option<&'static str>,
+    },
+    Sum {
+        field: R,
+        alias: Option<&'static str>,
+    },
+    Min {
+        field: R,
+        alias: Option<&'static str>,
+    },
+    Max {
+        field: R,
+        alias: Option<&'static str>,
+    },
+}
+
+#[derive(Debug)]
+pub struct QueryBuilder<T>
+where
+    T: AnalyticsDataSource,
+    AnalyticsCollection: ToSql<T>,
+{
+    columns: Vec<String>,
+    filters: Vec<(String, FilterTypes, String)>,
+    group_by: Vec<String>,
+    having: Option<Vec<(String, FilterTypes, String)>>,
+    table: AnalyticsCollection,
+    distinct: bool,
+    db_type: PhantomData<T>,
+}
+
+pub trait ToSql<T: AnalyticsDataSource> {
+    fn to_sql(&self) -> error_stack::Result<String, ParsingError>;
+}
+
+/// Implement `ToSql` for each of the listed types by delegating to `ToString`.
+macro_rules!
impl_to_sql_for_to_string { + ($($type:ty),+) => { + $( + impl ToSql for $type { + fn to_sql(&self) -> error_stack::Result { + Ok(self.to_string()) + } + } + )+ + }; +} + +impl_to_sql_for_to_string!( + String, + &str, + &PaymentDimensions, + &RefundDimensions, + PaymentDimensions, + RefundDimensions, + PaymentMethod, + AuthenticationType, + Connector, + AttemptStatus, + RefundStatus, + storage_enums::RefundStatus, + Currency, + RefundType, + &String, + &bool, + &u64 +); + +#[allow(dead_code)] +#[derive(Debug)] +pub enum FilterTypes { + Equal, + EqualBool, + In, + Gte, + Lte, + Gt, +} + +impl QueryBuilder +where + T: AnalyticsDataSource, + AnalyticsCollection: ToSql, +{ + pub fn new(table: AnalyticsCollection) -> Self { + Self { + columns: Default::default(), + filters: Default::default(), + group_by: Default::default(), + having: Default::default(), + table, + distinct: Default::default(), + db_type: Default::default(), + } + } + + pub fn add_select_column(&mut self, column: impl ToSql) -> QueryResult<()> { + self.columns.push( + column + .to_sql() + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing select column")?, + ); + Ok(()) + } + + pub fn set_distinct(&mut self) { + self.distinct = true + } + + pub fn add_filter_clause( + &mut self, + key: impl ToSql, + value: impl ToSql, + ) -> QueryResult<()> { + self.add_custom_filter_clause(key, value, FilterTypes::Equal) + } + + pub fn add_bool_filter_clause( + &mut self, + key: impl ToSql, + value: impl ToSql, + ) -> QueryResult<()> { + self.add_custom_filter_clause(key, value, FilterTypes::EqualBool) + } + + pub fn add_custom_filter_clause( + &mut self, + lhs: impl ToSql, + rhs: impl ToSql, + comparison: FilterTypes, + ) -> QueryResult<()> { + self.filters.push(( + lhs.to_sql() + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing filter key")?, + comparison, + rhs.to_sql() + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing filter value")?, + )); + Ok(()) + } + + pub fn add_filter_in_range_clause( + &mut self, + key: impl ToSql, + values: &[impl ToSql], + ) -> QueryResult<()> { + let list = values + .iter() + .map(|i| { + // trimming whitespaces from the filter values received in request, to prevent a possibility of an SQL injection + i.to_sql().map(|s| { + let trimmed_str = s.replace(' ', ""); + format!("'{trimmed_str}'") + }) + }) + .collect::, ParsingError>>() + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing range filter value")? 
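+        // The quoted values are joined into one comma-separated list for the IN clause,
+        // e.g. ["USD", "EUR"] becomes the string "'USD', 'EUR'" (illustrative values).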
+            .join(", ");
+        self.add_custom_filter_clause(key, list, FilterTypes::In)
+    }
+
+    pub fn add_group_by_clause(&mut self, column: impl ToSql<T>) -> QueryResult<()> {
+        self.group_by.push(
+            column
+                .to_sql()
+                .change_context(QueryBuildingError::SqlSerializeError)
+                .attach_printable("Error serializing group by field")?,
+        );
+        Ok(())
+    }
+
+    pub fn add_granularity_in_mins(&mut self, granularity: &Granularity) -> QueryResult<()> {
+        let interval = match granularity {
+            Granularity::OneMin => "1",
+            Granularity::FiveMin => "5",
+            Granularity::FifteenMin => "15",
+            Granularity::ThirtyMin => "30",
+            Granularity::OneHour => "60",
+            Granularity::OneDay => "1440",
+        };
+        let _ = self.add_select_column(format!(
+            "toStartOfInterval(created_at, INTERVAL {interval} MINUTE) as time_bucket"
+        ));
+        Ok(())
+    }
+
+    fn get_filter_clause(&self) -> String {
+        self.filters
+            .iter()
+            .map(|(l, op, r)| match op {
+                FilterTypes::EqualBool => format!("{l} = {r}"),
+                FilterTypes::Equal => format!("{l} = '{r}'"),
+                FilterTypes::In => format!("{l} IN ({r})"),
+                FilterTypes::Gte => format!("{l} >= '{r}'"),
+                FilterTypes::Gt => format!("{l} > {r}"),
+                FilterTypes::Lte => format!("{l} <= '{r}'"),
+            })
+            .collect::<Vec<String>>()
+            .join(" AND ")
+    }
+
+    fn get_select_clause(&self) -> String {
+        self.columns.join(", ")
+    }
+
+    fn get_group_by_clause(&self) -> String {
+        self.group_by.join(", ")
+    }
+
+    #[allow(dead_code)]
+    pub fn add_having_clause<R>(
+        &mut self,
+        aggregate: Aggregate<R>,
+        filter_type: FilterTypes,
+        value: impl ToSql<T>,
+    ) -> QueryResult<()>
+    where
+        Aggregate<R>: ToSql<T>,
+    {
+        let aggregate = aggregate
+            .to_sql()
+            .change_context(QueryBuildingError::SqlSerializeError)
+            .attach_printable("Error serializing having aggregate")?;
+        let value = value
+            .to_sql()
+            .change_context(QueryBuildingError::SqlSerializeError)
+            .attach_printable("Error serializing having value")?;
+        let entry = (aggregate, filter_type, value);
+        if let Some(having) = &mut self.having {
+            having.push(entry);
+        } else {
+            self.having = Some(vec![entry]);
+        }
+        Ok(())
+    }
+
+    pub fn get_filter_type_clause(&self) -> Option<String> {
+        self.having.as_ref().map(|vec| {
+            vec.iter()
+                .map(|(l, op, r)| match op {
+                    FilterTypes::Equal | FilterTypes::EqualBool => format!("{l} = {r}"),
+                    FilterTypes::In => format!("{l} IN ({r})"),
+                    FilterTypes::Gte => format!("{l} >= {r}"),
+                    FilterTypes::Lte => format!("{l} <= {r}"),
+                    FilterTypes::Gt => format!("{l} > {r}"),
+                })
+                .collect::<Vec<String>>()
+                .join(" AND ")
+        })
+    }
+
+    pub fn build_query(&mut self) -> QueryResult<String>
+    where
+        Aggregate<&'static str>: ToSql<T>,
+    {
+        if self.columns.is_empty() {
+            Err(QueryBuildingError::InvalidQuery(
+                "No select fields provided",
+            ))
+            .into_report()?;
+        }
+        let mut query = String::from("SELECT ");
+
+        if self.distinct {
+            query.push_str("DISTINCT ");
+        }
+
+        query.push_str(&self.get_select_clause());
+
+        query.push_str(" FROM ");
+
+        query.push_str(
+            &self
+                .table
+                .to_sql()
+                .change_context(QueryBuildingError::SqlSerializeError)
+                .attach_printable("Error serializing table value")?,
+        );
+
+        if !self.filters.is_empty() {
+            query.push_str(" WHERE ");
+            query.push_str(&self.get_filter_clause());
+        }
+
+        if !self.group_by.is_empty() {
+            query.push_str(" GROUP BY ");
+            query.push_str(&self.get_group_by_clause());
+        }
+
+        if self.having.is_some() {
+            if let Some(condition) = self.get_filter_type_clause() {
+                query.push_str(" HAVING ");
+                query.push_str(condition.as_str());
+            }
+        }
+        Ok(query)
+    }
+
+    pub async fn execute_query<R, P: AnalyticsDataSource>(
+        &mut self,
+        store: &P,
+    ) -> CustomResult<CustomResult<Vec<R>,
QueryExecutionError>, QueryBuildingError> + where + P: LoadRow, + Aggregate<&'static str>: ToSql, + { + let query = self + .build_query() + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Failed to execute query")?; + logger::debug!(?query); + Ok(store.load_results(query.as_str()).await) + } +} diff --git a/crates/router/src/analytics/refunds.rs b/crates/router/src/analytics/refunds.rs new file mode 100644 index 000000000000..a8b52effe76d --- /dev/null +++ b/crates/router/src/analytics/refunds.rs @@ -0,0 +1,10 @@ +pub mod accumulator; +mod core; + +pub mod filters; +pub mod metrics; +pub mod types; +pub use accumulator::{RefundMetricAccumulator, RefundMetricsAccumulator}; + +pub trait RefundAnalytics: metrics::RefundMetricAnalytics {} +pub use self::core::get_metrics; diff --git a/crates/router/src/analytics/refunds/accumulator.rs b/crates/router/src/analytics/refunds/accumulator.rs new file mode 100644 index 000000000000..3d0c0e659f6c --- /dev/null +++ b/crates/router/src/analytics/refunds/accumulator.rs @@ -0,0 +1,110 @@ +use api_models::analytics::refunds::RefundMetricsBucketValue; +use common_enums::enums as storage_enums; + +use super::metrics::RefundMetricRow; +#[derive(Debug, Default)] +pub struct RefundMetricsAccumulator { + pub refund_success_rate: SuccessRateAccumulator, + pub refund_count: CountAccumulator, + pub refund_success: CountAccumulator, + pub processed_amount: SumAccumulator, +} + +#[derive(Debug, Default)] +pub struct SuccessRateAccumulator { + pub success: i64, + pub total: i64, +} + +#[derive(Debug, Default)] +#[repr(transparent)] +pub struct CountAccumulator { + pub count: Option, +} + +#[derive(Debug, Default)] +#[repr(transparent)] +pub struct SumAccumulator { + pub total: Option, +} + +pub trait RefundMetricAccumulator { + type MetricOutput; + + fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow); + + fn collect(self) -> Self::MetricOutput; +} + +impl RefundMetricAccumulator for CountAccumulator { + type MetricOutput = Option; + #[inline] + fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { + self.count = match (self.count, metrics.count) { + (None, None) => None, + (None, i @ Some(_)) | (i @ Some(_), None) => i, + (Some(a), Some(b)) => Some(a + b), + } + } + #[inline] + fn collect(self) -> Self::MetricOutput { + self.count.and_then(|i| u64::try_from(i).ok()) + } +} + +impl RefundMetricAccumulator for SumAccumulator { + type MetricOutput = Option; + #[inline] + fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { + self.total = match ( + self.total, + metrics + .total + .as_ref() + .and_then(bigdecimal::ToPrimitive::to_i64), + ) { + (None, None) => None, + (None, i @ Some(_)) | (i @ Some(_), None) => i, + (Some(a), Some(b)) => Some(a + b), + } + } + #[inline] + fn collect(self) -> Self::MetricOutput { + self.total.and_then(|i| u64::try_from(i).ok()) + } +} + +impl RefundMetricAccumulator for SuccessRateAccumulator { + type MetricOutput = Option; + + fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) { + if let Some(ref refund_status) = metrics.refund_status { + if refund_status.as_ref() == &storage_enums::RefundStatus::Success { + self.success += metrics.count.unwrap_or_default(); + } + }; + self.total += metrics.count.unwrap_or_default(); + } + + fn collect(self) -> Self::MetricOutput { + if self.total <= 0 { + None + } else { + Some( + f64::from(u32::try_from(self.success).ok()?) 
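+                    // scale the success/total ratio to a percentage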
* 100.0 + / f64::from(u32::try_from(self.total).ok()?), + ) + } + } +} + +impl RefundMetricsAccumulator { + pub fn collect(self) -> RefundMetricsBucketValue { + RefundMetricsBucketValue { + refund_success_rate: self.refund_success_rate.collect(), + refund_count: self.refund_count.collect(), + refund_success_count: self.refund_success.collect(), + refund_processed_amount: self.processed_amount.collect(), + } + } +} diff --git a/crates/router/src/analytics/refunds/core.rs b/crates/router/src/analytics/refunds/core.rs new file mode 100644 index 000000000000..4c2d2c394181 --- /dev/null +++ b/crates/router/src/analytics/refunds/core.rs @@ -0,0 +1,104 @@ +use std::collections::HashMap; + +use api_models::analytics::{ + refunds::{RefundMetrics, RefundMetricsBucketIdentifier, RefundMetricsBucketResponse}, + AnalyticsMetadata, GetRefundMetricRequest, MetricsResponse, +}; +use error_stack::{IntoReport, ResultExt}; +use router_env::{ + logger, + tracing::{self, Instrument}, +}; + +use super::RefundMetricsAccumulator; +use crate::{ + analytics::{ + core::AnalyticsApiResponse, errors::AnalyticsError, refunds::RefundMetricAccumulator, + AnalyticsProvider, + }, + services::ApplicationResponse, + types::domain, +}; + +pub async fn get_metrics( + pool: AnalyticsProvider, + merchant_account: domain::MerchantAccount, + req: GetRefundMetricRequest, +) -> AnalyticsApiResponse> { + let mut metrics_accumulator: HashMap = + HashMap::new(); + let mut set = tokio::task::JoinSet::new(); + for metric_type in req.metrics.iter().cloned() { + let req = req.clone(); + let merchant_id = merchant_account.merchant_id.clone(); + let pool = pool.clone(); + let task_span = tracing::debug_span!( + "analytics_refund_query", + refund_metric = metric_type.as_ref() + ); + set.spawn( + async move { + let data = pool + .get_refund_metrics( + &metric_type, + &req.group_by_names.clone(), + &merchant_id, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + (metric_type, data) + } + .instrument(task_span), + ); + } + + while let Some((metric, data)) = set + .join_next() + .await + .transpose() + .into_report() + .change_context(AnalyticsError::UnknownError)? + { + for (id, value) in data? 
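+        // Fold each (bucket id, row) pair into the accumulator field matching the metric type.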
{ + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + RefundMetrics::RefundSuccessRate => metrics_builder + .refund_success_rate + .add_metrics_bucket(&value), + RefundMetrics::RefundCount => { + metrics_builder.refund_count.add_metrics_bucket(&value) + } + RefundMetrics::RefundSuccessCount => { + metrics_builder.refund_success.add_metrics_bucket(&value) + } + RefundMetrics::RefundProcessedAmount => { + metrics_builder.processed_amount.add_metrics_bucket(&value) + } + } + } + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + let query_data: Vec = metrics_accumulator + .into_iter() + .map(|(id, val)| RefundMetricsBucketResponse { + values: val.collect(), + dimensions: id, + }) + .collect(); + + Ok(ApplicationResponse::Json(MetricsResponse { + query_data, + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + })) +} diff --git a/crates/router/src/analytics/refunds/filters.rs b/crates/router/src/analytics/refunds/filters.rs new file mode 100644 index 000000000000..6b45e9194fad --- /dev/null +++ b/crates/router/src/analytics/refunds/filters.rs @@ -0,0 +1,59 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundType}, + Granularity, TimeRange, +}; +use common_enums::enums::{Currency, RefundStatus}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql}, + types::{ + AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult, + LoadRow, + }, +}; +pub trait RefundFilterAnalytics: LoadRow {} + +pub async fn get_refund_filter_for_dimension( + dimension: RefundDimensions, + merchant: &String, + time_range: &TimeRange, + pool: &T, +) -> FiltersResult> +where + T: AnalyticsDataSource + RefundFilterAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Refund); + + query_builder.add_select_column(dimension).switch()?; + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant) + .switch()?; + + query_builder.set_distinct(); + + query_builder + .execute_query::(pool) + .await + .change_context(FiltersError::QueryBuildingError)? 
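+        // Nested results: the outer error covers query building, the inner one query execution.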
+ .change_context(FiltersError::QueryExecutionFailure) +} + +#[derive(Debug, serde::Serialize, Eq, PartialEq)] +pub struct RefundFilterRow { + pub currency: Option>, + pub refund_status: Option>, + pub connector: Option, + pub refund_type: Option>, +} diff --git a/crates/router/src/analytics/refunds/metrics.rs b/crates/router/src/analytics/refunds/metrics.rs new file mode 100644 index 000000000000..d4f509b4a1e3 --- /dev/null +++ b/crates/router/src/analytics/refunds/metrics.rs @@ -0,0 +1,126 @@ +use api_models::analytics::{ + refunds::{ + RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier, RefundType, + }, + Granularity, TimeRange, +}; +use common_enums::enums as storage_enums; +use time::PrimitiveDateTime; +mod refund_count; +mod refund_processed_amount; +mod refund_success_count; +mod refund_success_rate; +use refund_count::RefundCount; +use refund_processed_amount::RefundProcessedAmount; +use refund_success_count::RefundSuccessCount; +use refund_success_rate::RefundSuccessRate; + +use crate::analytics::{ + query::{Aggregate, GroupByClause, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, +}; + +#[derive(Debug, Eq, PartialEq)] +pub struct RefundMetricRow { + pub currency: Option>, + pub refund_status: Option>, + pub connector: Option, + pub refund_type: Option>, + pub total: Option, + pub count: Option, + pub start_bucket: Option, + pub end_bucket: Option, +} + +pub trait RefundMetricAnalytics: LoadRow {} + +#[async_trait::async_trait] +pub trait RefundMetric +where + T: AnalyticsDataSource + RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult>; +} + +#[async_trait::async_trait] +impl RefundMetric for RefundMetrics +where + T: AnalyticsDataSource + RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + match self { + Self::RefundSuccessRate => { + RefundSuccessRate::default() + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::RefundCount => { + RefundCount::default() + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::RefundSuccessCount => { + RefundSuccessCount::default() + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::RefundProcessedAmount => { + RefundProcessedAmount::default() + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + } +} diff --git a/crates/router/src/analytics/refunds/metrics/refund_count.rs b/crates/router/src/analytics/refunds/metrics/refund_count.rs new file mode 100644 index 000000000000..471327235073 --- /dev/null +++ b/crates/router/src/analytics/refunds/metrics/refund_count.rs @@ -0,0 +1,116 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use 
common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::RefundMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct RefundCount {} + +#[async_trait::async_trait] +impl super::RefundMetric for RefundCount +where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Refund); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
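+        // Same bucket post-processing as the payment metrics: each row is keyed by a
+        // RefundMetricsBucketIdentifier with granularity-clipped time bounds.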
+ .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + i.refund_status.as_ref().map(|i| i.0), + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, crate::analytics::query::PostProcessingError>>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs b/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs new file mode 100644 index 000000000000..c5f3a706aaef --- /dev/null +++ b/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs @@ -0,0 +1,122 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_enums::enums as storage_enums; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::RefundMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; +#[derive(Default)] +pub(super) struct RefundProcessedAmount {} + +#[async_trait::async_trait] +impl super::RefundMetric for RefundProcessedAmount +where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> + where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Refund); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Sum { + field: "refund_amount", + alias: Some("total"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder.add_group_by_clause(dim).switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .switch()?; + } + + query_builder + .add_filter_clause( + RefundDimensions::RefundStatus, + storage_enums::RefundStatus::Success, + ) + .switch()?; + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, crate::analytics::query::PostProcessingError>>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/refunds/metrics/refund_success_count.rs b/crates/router/src/analytics/refunds/metrics/refund_success_count.rs new file mode 100644 index 000000000000..0c8032908fd7 --- /dev/null +++ b/crates/router/src/analytics/refunds/metrics/refund_success_count.rs @@ -0,0 +1,122 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_enums::enums as storage_enums; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::RefundMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct RefundSuccessCount {} + +#[async_trait::async_trait] +impl super::RefundMetric for RefundSuccessCount +where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> + where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + { + let mut query_builder = QueryBuilder::new(AnalyticsCollection::Refund); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range.set_filter_clause(&mut query_builder).switch()?; + + for dim in dimensions.iter() { + query_builder.add_group_by_clause(dim).switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .switch()?; + } + + query_builder + .add_filter_clause( + RefundDimensions::RefundStatus, + storage_enums::RefundStatus::Success, + ) + .switch()?; + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
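+        // RefundStatus is pinned to Success by the filter above, so the identifier
+        // carries None for the status dimension.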
+ .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::analytics::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs b/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs new file mode 100644 index 000000000000..42f9ccf8d3c0 --- /dev/null +++ b/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs @@ -0,0 +1,117 @@ +use api_models::analytics::{ + refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::RefundMetricRow; +use crate::analytics::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; +#[derive(Default)] +pub(super) struct RefundSuccessRate {} + +#[async_trait::async_trait] +impl super::RefundMetric for RefundSuccessRate +where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> + where + T: AnalyticsDataSource + super::RefundMetricAnalytics, + { + let mut query_builder = QueryBuilder::new(AnalyticsCollection::Refund); + let mut dimensions = dimensions.to_vec(); + + dimensions.push(RefundDimensions::RefundStatus); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range.set_filter_clause(&mut query_builder).switch()?; + + for dim in dimensions.iter() { + query_builder.add_group_by_clause(dim).switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + RefundMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.refund_type.as_ref().map(|i| i.0.to_string()), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::analytics::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/refunds/types.rs b/crates/router/src/analytics/refunds/types.rs new file mode 100644 index 000000000000..fbfd69972671 --- /dev/null +++ b/crates/router/src/analytics/refunds/types.rs @@ -0,0 +1,41 @@ +use api_models::analytics::refunds::{RefundDimensions, RefundFilters}; +use error_stack::ResultExt; + +use crate::analytics::{ + query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource}, +}; + +impl QueryFilter for RefundFilters +where + T: AnalyticsDataSource, + AnalyticsCollection: ToSql, +{ + fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { + if !self.currency.is_empty() { + builder + .add_filter_in_range_clause(RefundDimensions::Currency, &self.currency) + .attach_printable("Error adding currency filter")?; + } + + if !self.refund_status.is_empty() { + builder + .add_filter_in_range_clause(RefundDimensions::RefundStatus, &self.refund_status) + .attach_printable("Error adding refund status filter")?; + } + + if !self.connector.is_empty() { + builder + .add_filter_in_range_clause(RefundDimensions::Connector, &self.connector) + .attach_printable("Error adding connector filter")?; + } + + if !self.refund_type.is_empty() { + builder + .add_filter_in_range_clause(RefundDimensions::RefundType, &self.refund_type) + .attach_printable("Error adding auth type filter")?; + } + + Ok(()) + } +} diff --git a/crates/router/src/analytics/routes.rs b/crates/router/src/analytics/routes.rs new file mode 100644 index 000000000000..298ec61ec903 --- /dev/null +++ b/crates/router/src/analytics/routes.rs @@ -0,0 +1,145 @@ +use actix_web::{web, Responder, Scope}; +use api_models::analytics::{ + GetPaymentFiltersRequest, GetPaymentMetricRequest, GetRefundFilterRequest, + GetRefundMetricRequest, +}; +use router_env::AnalyticsFlow; + +use super::{core::*, payments, refunds, types::AnalyticsDomain}; +use crate::{ + core::api_locking, + services::{api, authentication as auth, authentication::AuthenticationData}, + AppState, +}; + +pub struct Analytics; + +impl Analytics { + pub fn server(state: AppState) -> Scope { + let route = web::scope("/analytics/v1").app_data(web::Data::new(state)); + route + .service(web::resource("metrics/payments").route(web::post().to(get_payment_metrics))) + .service(web::resource("metrics/refunds").route(web::post().to(get_refunds_metrics))) + .service(web::resource("filters/payments").route(web::post().to(get_payment_filters))) + .service(web::resource("filters/refunds").route(web::post().to(get_refund_filters))) + .service(web::resource("{domain}/info").route(web::get().to(get_info))) + } +} + +pub async fn get_info( + state: web::Data, + req: actix_web::HttpRequest, + domain: actix_web::web::Path, +) -> impl Responder { + let flow = AnalyticsFlow::GetInfo; + api::server_wrap( + flow, + state, + &req, + domain.into_inner(), + |_, _, domain| 
get_domain_info(domain), + &auth::NoAuth, + api_locking::LockAction::NotApplicable, + ) + .await +} + +/// # Panics +/// +/// Panics if `json_payload` array does not contain one `GetPaymentMetricRequest` element. +pub async fn get_payment_metrics( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetPaymentMetricRequest; 1]>, +) -> impl Responder { + // safety: This shouldn't panic owing to the data type + #[allow(clippy::expect_used)] + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetPaymentMetricRequest"); + let flow = AnalyticsFlow::GetPaymentMetrics; + api::server_wrap( + flow, + state, + &req, + payload, + |state, auth: AuthenticationData, req| { + payments::get_metrics(state.pool.clone(), auth.merchant_account, req) + }, + auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), + api_locking::LockAction::NotApplicable, + ) + .await +} + +/// # Panics +/// +/// Panics if `json_payload` array does not contain one `GetRefundMetricRequest` element. +pub async fn get_refunds_metrics( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetRefundMetricRequest; 1]>, +) -> impl Responder { + #[allow(clippy::expect_used)] + // safety: This shouldn't panic owing to the data type + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetRefundMetricRequest"); + let flow = AnalyticsFlow::GetRefundsMetrics; + api::server_wrap( + flow, + state, + &req, + payload, + |state, auth: AuthenticationData, req| { + refunds::get_metrics(state.pool.clone(), auth.merchant_account, req) + }, + auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), + api_locking::LockAction::NotApplicable, + ) + .await +} + +pub async fn get_payment_filters( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, +) -> impl Responder { + let flow = AnalyticsFlow::GetPaymentFilters; + api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req| { + payment_filters_core(state.pool.clone(), req, auth.merchant_account) + }, + auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), + api_locking::LockAction::NotApplicable, + ) + .await +} + +pub async fn get_refund_filters( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, +) -> impl Responder { + let flow = AnalyticsFlow::GetRefundFilters; + api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req: GetRefundFilterRequest| { + refund_filter_core(state.pool.clone(), req, auth.merchant_account) + }, + auth::auth_type(&auth::ApiKeyAuth, &auth::JWTAuth, req.headers()), + api_locking::LockAction::NotApplicable, + ) + .await +} diff --git a/crates/router/src/analytics/sqlx.rs b/crates/router/src/analytics/sqlx.rs new file mode 100644 index 000000000000..b88a2065f0b0 --- /dev/null +++ b/crates/router/src/analytics/sqlx.rs @@ -0,0 +1,401 @@ +use std::{fmt::Display, str::FromStr}; + +use api_models::analytics::refunds::RefundType; +use common_enums::enums::{ + AttemptStatus, AuthenticationType, Currency, PaymentMethod, RefundStatus, +}; +use common_utils::errors::{CustomResult, ParsingError}; +use error_stack::{IntoReport, ResultExt}; +#[cfg(feature = "kms")] +use external_services::{kms, kms::decrypt::KmsDecrypt}; +#[cfg(not(feature = "kms"))] +use masking::PeekInterface; +use sqlx::{ + postgres::{PgArgumentBuffer, PgPoolOptions, PgRow, PgTypeInfo, PgValueRef}, + Decode, 
Encode, + Error::ColumnNotFound, + FromRow, Pool, Postgres, Row, +}; +use time::PrimitiveDateTime; + +use super::{ + query::{Aggregate, ToSql}, + types::{ + AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, QueryExecutionError, + }, +}; +use crate::configs::settings::Database; + +#[derive(Debug, Clone)] +pub struct SqlxClient { + pool: Pool, +} + +impl Default for SqlxClient { + fn default() -> Self { + let database_url = format!( + "postgres://{}:{}@{}:{}/{}", + "db_user", "db_pass", "localhost", 5432, "hyperswitch_db" + ); + Self { + #[allow(clippy::expect_used)] + pool: PgPoolOptions::new() + .connect_lazy(&database_url) + .expect("SQLX Pool Creation failed"), + } + } +} + +impl SqlxClient { + pub async fn from_conf( + conf: &Database, + #[cfg(feature = "kms")] kms_client: &kms::KmsClient, + ) -> Self { + #[cfg(feature = "kms")] + #[allow(clippy::expect_used)] + let password = conf + .password + .decrypt_inner(kms_client) + .await + .expect("Failed to KMS decrypt database password"); + + #[cfg(not(feature = "kms"))] + let password = &conf.password.peek(); + let database_url = format!( + "postgres://{}:{}@{}:{}/{}", + conf.username, password, conf.host, conf.port, conf.dbname + ); + #[allow(clippy::expect_used)] + let pool = PgPoolOptions::new() + .max_connections(conf.pool_size) + .acquire_timeout(std::time::Duration::from_secs(conf.connection_timeout)) + .connect_lazy(&database_url) + .expect("SQLX Pool Creation failed"); + Self { pool } + } +} + +pub trait DbType { + fn name() -> &'static str; +} + +macro_rules! db_type { + ($a: ident, $str: tt) => { + impl DbType for $a { + fn name() -> &'static str { + stringify!($str) + } + } + }; + ($a:ident) => { + impl DbType for $a { + fn name() -> &'static str { + stringify!($a) + } + } + }; +} + +db_type!(Currency); +db_type!(AuthenticationType); +db_type!(AttemptStatus); +db_type!(PaymentMethod, TEXT); +db_type!(RefundStatus); +db_type!(RefundType); + +impl<'q, Type> Encode<'q, Postgres> for DBEnumWrapper +where + Type: DbType + FromStr + Display, +{ + fn encode_by_ref(&self, buf: &mut PgArgumentBuffer) -> sqlx::encode::IsNull { + self.0.to_string().encode(buf) + } + fn size_hint(&self) -> usize { + self.0.to_string().size_hint() + } +} + +impl<'r, Type> Decode<'r, Postgres> for DBEnumWrapper +where + Type: DbType + FromStr + Display, +{ + fn decode( + value: PgValueRef<'r>, + ) -> Result> { + let str_value = <&'r str as Decode<'r, Postgres>>::decode(value)?; + Type::from_str(str_value).map(DBEnumWrapper).or(Err(format!( + "invalid value {:?} for enum {}", + str_value, + Type::name() + ) + .into())) + } +} + +impl sqlx::Type for DBEnumWrapper +where + Type: DbType + FromStr + Display, +{ + fn type_info() -> PgTypeInfo { + PgTypeInfo::with_name(Type::name()) + } +} + +impl LoadRow for SqlxClient +where + for<'a> T: FromRow<'a, PgRow>, +{ + fn load_row(row: PgRow) -> CustomResult { + T::from_row(&row) + .into_report() + .change_context(QueryExecutionError::RowExtractionFailure) + } +} + +impl super::payments::filters::PaymentFilterAnalytics for SqlxClient {} +impl super::payments::metrics::PaymentMetricAnalytics for SqlxClient {} +impl super::refunds::metrics::RefundMetricAnalytics for SqlxClient {} +impl super::refunds::filters::RefundFilterAnalytics for SqlxClient {} + +#[async_trait::async_trait] +impl AnalyticsDataSource for SqlxClient { + type Row = PgRow; + + async fn load_results(&self, query: &str) -> CustomResult, QueryExecutionError> + where + Self: LoadRow, + { + sqlx::query(&format!("{query};")) + 
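+        // Execute the statement (a trailing semicolon is appended above) and decode
+        // each returned PgRow through the LoadRow implementation.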
.fetch_all(&self.pool) + .await + .into_report() + .change_context(QueryExecutionError::DatabaseError) + .attach_printable_lazy(|| format!("Failed to run query {query}"))? + .into_iter() + .map(Self::load_row) + .collect::, _>>() + .change_context(QueryExecutionError::RowExtractionFailure) + } +} + +impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow { + fn from_row(row: &'a PgRow) -> sqlx::Result { + let currency: Option> = + row.try_get("currency").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let refund_status: Option> = + row.try_get("refund_status").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let connector: Option = row.try_get("connector").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let refund_type: Option> = + row.try_get("refund_type").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let total: Option = row.try_get("total").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let count: Option = row.try_get("count").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + + let start_bucket: Option = row + .try_get::, _>("start_bucket")? + .and_then(|dt| dt.replace_millisecond(0).ok()); + let end_bucket: Option = row + .try_get::, _>("end_bucket")? + .and_then(|dt| dt.replace_millisecond(0).ok()); + Ok(Self { + currency, + refund_status, + connector, + refund_type, + total, + count, + start_bucket, + end_bucket, + }) + } +} + +impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow { + fn from_row(row: &'a PgRow) -> sqlx::Result { + let currency: Option> = + row.try_get("currency").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let status: Option> = + row.try_get("status").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let connector: Option = row.try_get("connector").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let authentication_type: Option> = + row.try_get("authentication_type").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let payment_method: Option = + row.try_get("payment_method").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let total: Option = row.try_get("total").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let count: Option = row.try_get("count").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + + let start_bucket: Option = row + .try_get::, _>("start_bucket")? + .and_then(|dt| dt.replace_millisecond(0).ok()); + let end_bucket: Option = row + .try_get::, _>("end_bucket")? 
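+            // Zero out milliseconds so bucket edges compare cleanly across rows.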
+
+impl<'a> FromRow<'a, PgRow> for super::payments::filters::FilterRow {
+    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
+        let currency: Option<DBEnumWrapper<Currency>> =
+            row.try_get("currency").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let status: Option<DBEnumWrapper<AttemptStatus>> =
+            row.try_get("status").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let authentication_type: Option<DBEnumWrapper<AuthenticationType>> =
+            row.try_get("authentication_type").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let payment_method: Option<String> =
+            row.try_get("payment_method").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        Ok(Self {
+            currency,
+            status,
+            connector,
+            authentication_type,
+            payment_method,
+        })
+    }
+}
+
+impl<'a> FromRow<'a, PgRow> for super::refunds::filters::RefundFilterRow {
+    fn from_row(row: &'a PgRow) -> sqlx::Result<Self> {
+        let currency: Option<DBEnumWrapper<Currency>> =
+            row.try_get("currency").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let refund_status: Option<DBEnumWrapper<RefundStatus>> =
+            row.try_get("refund_status").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        let connector: Option<String> = row.try_get("connector").or_else(|e| match e {
+            ColumnNotFound(_) => Ok(Default::default()),
+            e => Err(e),
+        })?;
+        let refund_type: Option<DBEnumWrapper<RefundType>> =
+            row.try_get("refund_type").or_else(|e| match e {
+                ColumnNotFound(_) => Ok(Default::default()),
+                e => Err(e),
+            })?;
+        Ok(Self {
+            currency,
+            refund_status,
+            connector,
+            refund_type,
+        })
+    }
+}
+
+impl ToSql for PrimitiveDateTime {
+    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
+        Ok(self.to_string())
+    }
+}
+
+impl ToSql for AnalyticsCollection {
+    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
+        match self {
+            Self::Payment => Ok("payment_attempt".to_string()),
+            Self::Refund => Ok("refund".to_string()),
+        }
+    }
+}
+
+impl<T> ToSql for Aggregate<T>
+where
+    T: ToSql,
+{
+    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
+        Ok(match self {
+            Self::Count { field: _, alias } => {
+                format!(
+                    "count(*){}",
+                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+                )
+            }
+            Self::Sum { field, alias } => {
+                format!(
+                    "sum({}){}",
+                    field.to_sql().attach_printable("Failed to sum aggregate")?,
+                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+                )
+            }
+            Self::Min { field, alias } => {
+                format!(
+                    "min({}){}",
+                    field.to_sql().attach_printable("Failed to min aggregate")?,
+                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+                )
+            }
+            Self::Max { field, alias } => {
+                format!(
+                    "max({}){}",
+                    field.to_sql().attach_printable("Failed to max aggregate")?,
+                    alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+                )
+            }
+        })
+    }
+}
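The `Aggregate` rendering above only needs `ToSql` on the field type to splice a column name into `sum(...)`/`min(...)`/`max(...)`. A sketch of how a dimension type plugs in; the `Dimension` struct is invented for illustration, while the trait path and return type follow this patch:

```rust
// Hypothetical field type: to_sql returns the bare column name, and Aggregate
// wraps it in the SQL function with an optional alias.
use crate::analytics::query::ToSql;
use common_utils::errors::ParsingError;

struct Dimension(&'static str);

impl ToSql for Dimension {
    fn to_sql(&self) -> error_stack::Result<String, ParsingError> {
        Ok(self.0.to_string())
    }
}

// With this impl, Aggregate::Sum { field: Dimension("amount"), alias: Some("total") }
// renders "sum(amount) as total", and a Count with alias None renders "count(*)".
// (The exact shape of Count's ignored `field` is not shown in the hunk above.)
```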
diff --git a/crates/router/src/analytics/types.rs b/crates/router/src/analytics/types.rs
new file mode 100644
index 000000000000..fe20e812a9b8
--- /dev/null
+++ b/crates/router/src/analytics/types.rs
@@ -0,0 +1,119 @@
+use std::{fmt::Display, str::FromStr};
+
+use common_utils::{
+    errors::{CustomResult, ErrorSwitch, ParsingError},
+    events::ApiEventMetric,
+};
+use error_stack::{report, Report, ResultExt};
+
+use super::query::QueryBuildingError;
+
+#[derive(serde::Deserialize, Debug, masking::Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum AnalyticsDomain {
+    Payments,
+    Refunds,
+}
+
+impl ApiEventMetric for AnalyticsDomain {}
+
+#[derive(Debug, strum::AsRefStr, strum::Display, Clone, Copy)]
+pub enum AnalyticsCollection {
+    Payment,
+    Refund,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)]
+#[serde(transparent)]
+pub struct DBEnumWrapper<T>(pub T);
+
+impl<T> AsRef<T> for DBEnumWrapper<T> {
+    fn as_ref(&self) -> &T {
+        &self.0
+    }
+}
+
+impl<T> FromStr for DBEnumWrapper<T>
+where
+    T: FromStr + Display,
+{
+    type Err = Report<ParsingError>;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        T::from_str(s)
+            .map_err(|_er| report!(ParsingError::EnumParseFailure(std::any::type_name::<T>())))
+            .map(DBEnumWrapper)
+            .attach_printable_lazy(|| format!("raw_value: {s}"))
+    }
+}
+
+// Analytics Framework
+
+pub trait RefundAnalytics {}
+
+#[async_trait::async_trait]
+pub trait AnalyticsDataSource
+where
+    Self: Sized + Sync + Send,
+{
+    type Row;
+    async fn load_results<T>(&self, query: &str) -> CustomResult<Vec<T>, QueryExecutionError>
+    where
+        Self: LoadRow<T>;
+}
+
+pub trait LoadRow<T>
+where
+    Self: AnalyticsDataSource,
+    T: Sized,
+{
+    fn load_row(row: Self::Row) -> CustomResult<T, QueryExecutionError>;
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum MetricsError {
+    #[error("Error building query")]
+    QueryBuildingError,
+    #[error("Error running Query")]
+    QueryExecutionFailure,
+    #[error("Error processing query results")]
+    PostProcessingFailure,
+    #[allow(dead_code)]
+    #[error("Not Implemented")]
+    NotImplemented,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum QueryExecutionError {
+    #[error("Failed to extract domain rows")]
+    RowExtractionFailure,
+    #[error("Database error")]
+    DatabaseError,
+}
+
+pub type MetricsResult<T> = CustomResult<T, MetricsError>;
+
+impl ErrorSwitch<MetricsError> for QueryBuildingError {
+    fn switch(&self) -> MetricsError {
+        MetricsError::QueryBuildingError
+    }
+}
+
+pub type FiltersResult<T> = CustomResult<T, FiltersError>;
+
+#[derive(thiserror::Error, Debug)]
+pub enum FiltersError {
+    #[error("Error building query")]
+    QueryBuildingError,
+    #[error("Error running Query")]
+    QueryExecutionFailure,
+    #[allow(dead_code)]
+    #[error("Not Implemented")]
+    NotImplemented,
+}
+
+impl ErrorSwitch<FiltersError> for QueryBuildingError {
+    fn switch(&self) -> FiltersError {
+        FiltersError::QueryBuildingError
+    }
+}
diff --git a/crates/router/src/analytics/utils.rs b/crates/router/src/analytics/utils.rs
new file mode 100644
index 000000000000..f7e6ea69dc37
--- /dev/null
+++ b/crates/router/src/analytics/utils.rs
@@ -0,0 +1,22 @@
+use api_models::analytics::{
+    payments::{PaymentDimensions, PaymentMetrics},
+    refunds::{RefundDimensions, RefundMetrics},
+    NameDescription,
+};
+use strum::IntoEnumIterator;
+
+pub fn get_payment_dimensions() -> Vec<NameDescription> {
+    PaymentDimensions::iter().map(Into::into).collect()
+}
+
+pub fn get_refund_dimensions() -> Vec<NameDescription> {
+    RefundDimensions::iter().map(Into::into).collect()
+}
+
+pub fn get_payment_metrics_info() -> Vec<NameDescription> {
+    PaymentMetrics::iter().map(Into::into).collect()
+}
+
+pub fn get_refund_metrics_info() -> Vec<NameDescription> {
+    RefundMetrics::iter().map(Into::into).collect()
+}
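`DBEnumWrapper::from_str` is what lets the sqlx layer parse a `TEXT` column back into a domain enum, reporting `EnumParseFailure` with the raw value attached. A small sketch, assuming an enum such as `diesel_models::enums::Currency` that implements `FromStr` and `Display` via the strum derives used elsewhere in the crate:

```rust
// Sketch only: Currency is assumed to derive strum::EnumString/strum::Display
// like the other diesel_models enums; the module path follows this patch.
use std::str::FromStr;

use crate::analytics::types::DBEnumWrapper;
use diesel_models::enums::Currency;

fn parse_currency(raw: &str) {
    match DBEnumWrapper::<Currency>::from_str(raw) {
        Ok(wrapped) => println!("parsed {}", wrapped.as_ref()),
        // The report names the target type via type_name and attaches the
        // offending input as "raw_value: {s}".
        Err(report) => println!("failed: {report:?}"),
    }
}
```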
diff --git a/crates/router/src/configs/settings.rs b/crates/router/src/configs/settings.rs
index df87c8a460ac..c5b71c6f7341 100644
--- a/crates/router/src/configs/settings.rs
+++ b/crates/router/src/configs/settings.rs
@@ -16,6 +16,8 @@ pub use router_env::config::{Log, LogConsole, LogFile, LogTelemetry};
 use scheduler::SchedulerSettings;
 use serde::{de::Error, Deserialize, Deserializer};
 
+#[cfg(feature = "olap")]
+use crate::analytics::AnalyticsConfig;
 use crate::{
     core::errors::{ApplicationError, ApplicationResult},
     env::{self, logger, Env},
@@ -101,6 +103,8 @@ pub struct Settings {
     pub lock_settings: LockSettings,
     pub temp_locker_enable_config: TempLockerEnableConfig,
     pub payment_link: PaymentLink,
+    #[cfg(feature = "olap")]
+    pub analytics: AnalyticsConfig,
     #[cfg(feature = "kv_store")]
     pub kv_config: KvConfig,
 }
diff --git a/crates/router/src/lib.rs b/crates/router/src/lib.rs
index 38efe8b75134..5cd0b6cbea5f 100644
--- a/crates/router/src/lib.rs
+++ b/crates/router/src/lib.rs
@@ -1,6 +1,8 @@
 #![forbid(unsafe_code)]
 #![recursion_limit = "256"]
 
+#[cfg(feature = "olap")]
+pub mod analytics;
 #[cfg(feature = "stripe")]
 pub mod compatibility;
 pub mod configs;
@@ -141,6 +143,7 @@ pub fn mk_app(
         .service(routes::ApiKeys::server(state.clone()))
         .service(routes::Files::server(state.clone()))
         .service(routes::Disputes::server(state.clone()))
+        .service(routes::Analytics::server(state.clone()))
         .service(routes::Routing::server(state.clone()))
         .service(routes::Gsm::server(state.clone()))
 }
diff --git a/crates/router/src/routes.rs b/crates/router/src/routes.rs
index 47b9f23cf8cb..ac5c14200600 100644
--- a/crates/router/src/routes.rs
+++ b/crates/router/src/routes.rs
@@ -42,3 +42,5 @@ pub use self::app::{
 };
 #[cfg(feature = "stripe")]
 pub use super::compatibility::stripe::StripeApis;
+#[cfg(feature = "olap")]
+pub use crate::analytics::routes::{self as analytics, Analytics};
diff --git a/crates/router/src/routes/app.rs b/crates/router/src/routes/app.rs
index ec87fcdc3900..67662961ed44 100644
--- a/crates/router/src/routes/app.rs
+++ b/crates/router/src/routes/app.rs
@@ -44,6 +44,8 @@ pub struct AppState {
     #[cfg(feature = "kms")]
     pub kms_secrets: Arc<settings::ActiveKmsSecrets>,
     pub api_client: Box<dyn crate::services::ApiClient>,
+    #[cfg(feature = "olap")]
+    pub pool: crate::analytics::AnalyticsProvider,
 }
 
 impl scheduler::SchedulerAppState for AppState {
@@ -128,6 +130,14 @@ impl AppState {
             ),
         };
 
+        #[cfg(feature = "olap")]
+        let pool = crate::analytics::AnalyticsProvider::from_conf(
+            &conf.analytics,
+            #[cfg(feature = "kms")]
+            kms_client,
+        )
+        .await;
+
         #[cfg(feature = "kms")]
         #[allow(clippy::expect_used)]
         let kms_secrets = settings::ActiveKmsSecrets {
@@ -149,6 +159,8 @@ impl AppState {
             kms_secrets: Arc::new(kms_secrets),
             api_client,
             event_handler: Box::<EventLogger>::default(),
+            #[cfg(feature = "olap")]
+            pool,
         }
     }
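With this wiring, `AppState` owns one `AnalyticsProvider` built at startup from `conf.analytics`, gated behind the `olap` feature like the routes that consume it. A sketch of how a handler could reach it; the handler shape and `web::Data` extraction are assumptions modeled on the existing routes, and only the `pool` field and the `from_conf` call come from this patch:

```rust
// Illustrative handler: actix-web state extraction is assumed; the field name
// `pool` is the one added to AppState above.
#[cfg(feature = "olap")]
async fn payment_metrics(state: actix_web::web::Data<crate::routes::app::AppState>) {
    // The provider is shared like the store/api_client fields and dispatches
    // to whichever backend the [analytics] config selected (sqlx here).
    let provider: &crate::analytics::AnalyticsProvider = &state.pool;
    let _ = provider;
    // ... hand `provider` to the analytics metrics/filters core functions ...
}
```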
@@ -22,6 +22,7 @@ pub mod vergen;
 pub use logger::*;
 pub use once_cell;
 pub use opentelemetry;
+use strum::Display;
 pub use tracing;
 #[cfg(feature = "actix_web")]
 pub use tracing_actix_web;
@@ -29,3 +30,19 @@ pub use tracing_appender;
 
 #[doc(inline)]
 pub use self::env::*;
+use crate::types::FlowMetric;
+
+/// Analytics Flow routes Enums
+/// Info - Dimensions and filters available for the domain
+/// Filters - Set of values present for the dimension
+/// Metrics - Analytical data on dimensions and metrics
+#[derive(Debug, Display, Clone, PartialEq, Eq)]
+pub enum AnalyticsFlow {
+    GetInfo,
+    GetPaymentFilters,
+    GetRefundFilters,
+    GetRefundsMetrics,
+    GetPaymentMetrics,
+}
+
+impl FlowMetric for AnalyticsFlow {}
diff --git a/crates/router_env/src/metrics.rs b/crates/router_env/src/metrics.rs
index e4943699ee5b..14402a7a6e91 100644
--- a/crates/router_env/src/metrics.rs
+++ b/crates/router_env/src/metrics.rs
@@ -63,3 +63,22 @@ macro_rules! histogram_metric {
         > = once_cell::sync::Lazy::new(|| $meter.f64_histogram($description).init());
     };
 }
+
+/// Create a [`Histogram`][Histogram] u64 metric with the specified name and an optional description,
+/// associated with the specified meter. Note that the meter must refer to a valid [`Meter`][Meter].
+///
+/// [Histogram]: opentelemetry::metrics::Histogram
+/// [Meter]: opentelemetry::metrics::Meter
+#[macro_export]
+macro_rules! histogram_metric_u64 {
+    ($name:ident, $meter:ident) => {
+        pub(crate) static $name: once_cell::sync::Lazy<
+            $crate::opentelemetry::metrics::Histogram<u64>,
+        > = once_cell::sync::Lazy::new(|| $meter.u64_histogram(stringify!($name)).init());
+    };
+    ($name:ident, $meter:ident, $description:literal) => {
+        pub(crate) static $name: once_cell::sync::Lazy<
+            $crate::opentelemetry::metrics::Histogram<u64>,
+        > = once_cell::sync::Lazy::new(|| $meter.u64_histogram($description).init());
+    };
+}
diff --git a/loadtest/config/development.toml b/loadtest/config/development.toml
index 352c4ff551bc..f70fc656d8e3 100644
--- a/loadtest/config/development.toml
+++ b/loadtest/config/development.toml
@@ -237,5 +237,17 @@ bank_debit.ach = { connector_list = "gocardless"}
 bank_debit.becs = { connector_list = "gocardless"}
 bank_debit.sepa = { connector_list = "gocardless"}
 
+[analytics]
+source = "sqlx"
+
+[analytics.sqlx]
+username = "db_user"
+password = "db_pass"
+host = "localhost"
+port = 5432
+dbname = "hyperswitch_db"
+pool_size = 5
+connection_timeout = 10
+
 [kv_config]
 ttl = 300 # 5 * 60 seconds

From b3d5062dc07676ec12e903b1999fdd9138c0891d Mon Sep 17 00:00:00 2001
From: Sampras Lopes
Date: Fri, 10 Nov 2023 17:13:29 +0530
Subject: [PATCH 5/5] refactor(events): update api events to follow snake case naming (#2828)

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 crates/common_utils/src/events.rs            | 2 +-
 crates/router/src/events/api_logs.rs         | 1 +
 crates/router/src/services/authentication.rs | 6 +++++-
 3 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/crates/common_utils/src/events.rs b/crates/common_utils/src/events.rs
index 8c52f6c36d63..753f1deeb676 100644
--- a/crates/common_utils/src/events.rs
+++ b/crates/common_utils/src/events.rs
@@ -8,7 +8,7 @@ pub trait ApiEventMetric {
 }
 
 #[derive(Clone, Debug, Eq, PartialEq, Serialize)]
-#[serde(tag = "flow_type")]
+#[serde(tag = "flow_type", rename_all = "snake_case")]
 pub enum ApiEventsType {
     Payout,
     Payment {
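The one-line attribute change above affects every serialized event: with an internal `flow_type` tag plus `rename_all = "snake_case"`, variant names are emitted in snake case. A standalone stand-in enum (not the real `ApiEventsType`) showing the output:

```rust
// Self-contained demo of internally tagged serde output with snake_case
// renaming; requires the serde and serde_json crates.
use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "flow_type", rename_all = "snake_case")]
enum DemoEventsType {
    Payout,
    Payment { payment_id: String },
}

fn main() {
    let event = DemoEventsType::Payment { payment_id: "pay_123".into() };
    // Prints {"flow_type":"payment","payment_id":"pay_123"}; a unit variant
    // like Payout serializes as {"flow_type":"payout"}.
    println!("{}", serde_json::to_string(&event).unwrap());
}
```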
diff --git a/crates/router/src/events/api_logs.rs b/crates/router/src/events/api_logs.rs
index 1a47568e7ad8..873102e81ec2 100644
--- a/crates/router/src/events/api_logs.rs
+++ b/crates/router/src/events/api_logs.rs
@@ -22,6 +22,7 @@ use crate::{
 };
 
 #[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+#[serde(rename_all = "snake_case")]
 pub struct ApiEvent {
     api_flow: String,
     created_at_timestamp: i128,
diff --git a/crates/router/src/services/authentication.rs b/crates/router/src/services/authentication.rs
index faa7864aff5b..0a7f5189b904 100644
--- a/crates/router/src/services/authentication.rs
+++ b/crates/router/src/services/authentication.rs
@@ -29,7 +29,11 @@ pub struct AuthenticationData {
 }
 
 #[derive(Clone, Debug, Eq, PartialEq, Serialize)]
-#[serde(tag = "api_auth_type")]
+#[serde(
+    tag = "api_auth_type",
+    content = "authentication_data",
+    rename_all = "snake_case"
+)]
 pub enum AuthenticationType {
     ApiKey {
         merchant_id: String,
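For `AuthenticationType`, adding `content = "authentication_data"` switches serde to adjacent tagging, so the variant name and its fields land under separate keys. A stand-in mirroring the shape shown in the hunk above (not the full router enum):

```rust
// Self-contained demo of adjacently tagged serde output; requires the serde
// and serde_json crates.
use serde::Serialize;

#[derive(Serialize)]
#[serde(
    tag = "api_auth_type",
    content = "authentication_data",
    rename_all = "snake_case"
)]
enum DemoAuthType {
    ApiKey { merchant_id: String },
}

fn main() {
    let auth = DemoAuthType::ApiKey { merchant_id: "merchant_abc".into() };
    // Prints:
    // {"api_auth_type":"api_key","authentication_data":{"merchant_id":"merchant_abc"}}
    println!("{}", serde_json::to_string(&auth).unwrap());
}
```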