diff --git a/CHANGELOG.md b/CHANGELOG.md index dfe703192a3a..3bbdac921fd7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,94 @@ All notable changes to HyperSwitch will be documented here. - - - +## 1.94.0 (2023-12-01) + +### Features + +- **user_role:** Add APIs for user roles ([#3013](https://github.com/juspay/hyperswitch/pull/3013)) ([`3fa0bdf`](https://github.com/juspay/hyperswitch/commit/3fa0bdf76558ec91df8d3beef3c36658cd138b37)) + +### Bug Fixes + +- **config:** Add kms decryption support for sqlx password ([#3029](https://github.com/juspay/hyperswitch/pull/3029)) ([`b593467`](https://github.com/juspay/hyperswitch/commit/b5934674e518f991a8a575ad01b971dd086eeb40)) + +### Refactors + +- **connector:** + - [Multisafe Pay] change error message from not supported to not implemented ([#2851](https://github.com/juspay/hyperswitch/pull/2851)) ([`668b943`](https://github.com/juspay/hyperswitch/commit/668b943403df2b3bb354dd093b8ec073a2618bda)) + - [Shift4] change error message from NotSupported to NotImplemented ([#2880](https://github.com/juspay/hyperswitch/pull/2880)) ([`bc79d52`](https://github.com/juspay/hyperswitch/commit/bc79d522c30aa036378cf1e01354c422585cc226)) + +**Full Changelog:** [`v1.93.0...v1.94.0`](https://github.com/juspay/hyperswitch/compare/v1.93.0...v1.94.0) + +- - - + + +## 1.93.0 (2023-11-30) + +### Features + +- **connector:** [BANKOFAMERICA] Add Required Fields for GPAY ([#3014](https://github.com/juspay/hyperswitch/pull/3014)) ([`d30b58a`](https://github.com/juspay/hyperswitch/commit/d30b58abb5e716b70c2dadec9e6f13c9e3403b6f)) +- **core:** Add ability to verify connector credentials before integrating the connector ([#2986](https://github.com/juspay/hyperswitch/pull/2986)) ([`39f255b`](https://github.com/juspay/hyperswitch/commit/39f255b4b209588dec35d780078c2ab7ceb37b10)) +- **router:** Make core changes in payments flow to support incremental authorization ([#3009](https://github.com/juspay/hyperswitch/pull/3009)) ([`1ca2ba4`](https://github.com/juspay/hyperswitch/commit/1ca2ba459495ff9340954c87a6ae3e4dce0e7b71)) +- **user:** Add support for dashboard metadata ([#3000](https://github.com/juspay/hyperswitch/pull/3000)) ([`6a2e4ab`](https://github.com/juspay/hyperswitch/commit/6a2e4ab4169820f35e953a949bd2e82e7f098ed2)) + +### Bug Fixes + +- **connector:** + - Move authorised status to charged in setup mandate ([#3017](https://github.com/juspay/hyperswitch/pull/3017)) ([`663754d`](https://github.com/juspay/hyperswitch/commit/663754d629d59a17ba9d4985fe04f9404ceb16b7)) + - [Trustpay] Add mapping to error code `800.100.165` and `900.100.100` ([#2925](https://github.com/juspay/hyperswitch/pull/2925)) ([`8c37a8d`](https://github.com/juspay/hyperswitch/commit/8c37a8d857c5a58872fa2b2e194b85e755129677)) +- **core:** Error message on Refund update for `Not Implemented` Case ([#3011](https://github.com/juspay/hyperswitch/pull/3011)) ([`6b7ada1`](https://github.com/juspay/hyperswitch/commit/6b7ada1a34450ea3a7fc019375ba462a14ddd6ab)) +- **pm_list:** [Trustpay] Update Cards, Bank_redirect - blik pm type required field info for Trustpay ([#2999](https://github.com/juspay/hyperswitch/pull/2999)) ([`c05432c`](https://github.com/juspay/hyperswitch/commit/c05432c0bd70f222c2f898ce2cbb47a46364a490)) +- **router:** + - [Dlocal] connector transaction id fix ([#2872](https://github.com/juspay/hyperswitch/pull/2872)) ([`44b1f49`](https://github.com/juspay/hyperswitch/commit/44b1f4949ea06d59480670ccfa02446fa7713d13)) + - Use default value for the routing algorithm column during business 
profile creation ([#2791](https://github.com/juspay/hyperswitch/pull/2791)) ([`b1fe76a`](https://github.com/juspay/hyperswitch/commit/b1fe76a82b4026d6eaa3baf4356378040880a458)) +- **routing:** Fix kgraph to exclude PM auth during construction ([#3019](https://github.com/juspay/hyperswitch/pull/3019)) ([`c6cb527`](https://github.com/juspay/hyperswitch/commit/c6cb527f07e23796c342f3562fbf3b61f1ef6801)) + +### Refactors + +- **connector:** + - [Stax] change error message from NotSupported to NotImplemented ([#2879](https://github.com/juspay/hyperswitch/pull/2879)) ([`8a4dabc`](https://github.com/juspay/hyperswitch/commit/8a4dabc61df3e6012e50f785d93808ca3349be65)) + - [Volt] change error message from NotSupported to NotImplemented ([#2878](https://github.com/juspay/hyperswitch/pull/2878)) ([`de8e31b`](https://github.com/juspay/hyperswitch/commit/de8e31b70d9b3c11e268cd1deffa71918dc4270d)) + - [Adyen] Change country and issuer type to Optional for OpenBankingUk ([#2993](https://github.com/juspay/hyperswitch/pull/2993)) ([`ab3dac7`](https://github.com/juspay/hyperswitch/commit/ab3dac79b4f138cd1f60a9afc0635dcc137a4a05)) +- **postman:** Fix payme postman collection for handling `order_details` ([#2996](https://github.com/juspay/hyperswitch/pull/2996)) ([`1e60c71`](https://github.com/juspay/hyperswitch/commit/1e60c710985b341a118bb32962bd74b406d78f69)) + +**Full Changelog:** [`v1.92.0...v1.93.0`](https://github.com/juspay/hyperswitch/compare/v1.92.0...v1.93.0) + +- - - + + +## 1.92.0 (2023-11-29) + +### Features + +- **analytics:** Add Clickhouse based analytics ([#2988](https://github.com/juspay/hyperswitch/pull/2988)) ([`9df4e01`](https://github.com/juspay/hyperswitch/commit/9df4e0193ffeb6d1cc323bdebb7e2bdfb2a375e2)) +- **ses_email:** Add email services to hyperswitch ([#2977](https://github.com/juspay/hyperswitch/pull/2977)) ([`5f5e895`](https://github.com/juspay/hyperswitch/commit/5f5e895f638701a0e6ab3deea9101ef39033dd16)) + +### Bug Fixes + +- **router:** Make use of warning to log errors when apple pay metadata parsing fails ([#3010](https://github.com/juspay/hyperswitch/pull/3010)) ([`2e57745`](https://github.com/juspay/hyperswitch/commit/2e57745352c547323ac2df2554f6bc2dbd6da37f)) + +**Full Changelog:** [`v1.91.1...v1.92.0`](https://github.com/juspay/hyperswitch/compare/v1.91.1...v1.92.0) + +- - - + + +## 1.91.1 (2023-11-29) + +### Bug Fixes + +- Remove `dummy_connector` from `default` features in `common_enums` ([#3005](https://github.com/juspay/hyperswitch/pull/3005)) ([`bb593ab`](https://github.com/juspay/hyperswitch/commit/bb593ab0cd1a30190b6c305f2432de83ac7fde93)) +- Remove error propagation if card name not found in locker in case of temporary token ([#3006](https://github.com/juspay/hyperswitch/pull/3006)) ([`5c32b37`](https://github.com/juspay/hyperswitch/commit/5c32b3739e2c5895fe7f5cf8cc92f917c2639eac)) +- Few fields were not getting updated in apply_changeset function ([#3002](https://github.com/juspay/hyperswitch/pull/3002)) ([`d289524`](https://github.com/juspay/hyperswitch/commit/d289524869f0c3835db9cf90d57ebedf560e0291)) + +### Miscellaneous Tasks + +- **deps:** Bump openssl from 0.10.57 to 0.10.60 ([#3004](https://github.com/juspay/hyperswitch/pull/3004)) ([`1c2f35a`](https://github.com/juspay/hyperswitch/commit/1c2f35af92608fca5836448710eca9f9c23a776a)) + +**Full Changelog:** [`v1.91.0...v1.91.1`](https://github.com/juspay/hyperswitch/compare/v1.91.0...v1.91.1) + +- - - + + ## 1.91.0 (2023-11-28) ### Features diff --git a/Cargo.lock b/Cargo.lock index 2ca33b6910a0..e8719b29f51d 
100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -332,6 +332,36 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +[[package]] +name = "analytics" +version = "0.1.0" +dependencies = [ + "actix-web", + "api_models", + "async-trait", + "aws-config", + "aws-sdk-lambda", + "aws-smithy-types", + "bigdecimal", + "common_utils", + "diesel_models", + "error-stack", + "external_services", + "futures 0.3.28", + "masking", + "once_cell", + "reqwest", + "router_env", + "serde", + "serde_json", + "sqlx", + "storage_impl", + "strum 0.25.0", + "thiserror", + "time", + "tokio 1.32.0", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -729,6 +759,31 @@ dependencies = [ "tracing", ] +[[package]] +name = "aws-sdk-lambda" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3ad176ffaa3aafa532246eb6a9f18a7d68da19950704ecc95d33d9dc3c62a9b" +dependencies = [ + "aws-credential-types", + "aws-endpoint", + "aws-http", + "aws-sig-auth", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-json", + "aws-smithy-types", + "aws-types", + "bytes 1.5.0", + "http", + "regex", + "tokio-stream", + "tower", + "tracing", +] + [[package]] name = "aws-sdk-s3" version = "0.28.0" @@ -1148,6 +1203,7 @@ dependencies = [ "num-bigint", "num-integer", "num-traits", + "serde", ] [[package]] @@ -1256,7 +1312,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f404657a7ea7b5249e36808dff544bc88a28f26e0ac40009f674b7a009d14be3" dependencies = [ "once_cell", - "proc-macro-crate", + "proc-macro-crate 2.0.0", "proc-macro2", "quote", "syn 2.0.38", @@ -2366,11 +2422,14 @@ dependencies = [ "aws-config", "aws-sdk-kms", "aws-sdk-sesv2", + "aws-sdk-sts", "aws-smithy-client", "base64 0.21.4", "common_utils", "dyn-clone", "error-stack", + "hyper", + "hyper-proxy", "masking", "once_cell", "router_env", @@ -2867,6 +2926,30 @@ dependencies = [ "hashbrown 0.14.1", ] +[[package]] +name = "headers" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" +dependencies = [ + "base64 0.21.4", + "bytes 1.5.0", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + [[package]] name = "heck" version = "0.4.1" @@ -2994,6 +3077,24 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-proxy" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca815a891b24fdfb243fa3239c86154392b0953ee584aa1a2a1f66d20cbe75cc" +dependencies = [ + "bytes 1.5.0", + "futures 0.3.28", + "headers", + "http", + "hyper", + "hyper-tls", + "native-tls", + "tokio 1.32.0", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-rustls" version = "0.23.2" @@ -3696,6 +3797,12 @@ dependencies = [ "uuid", ] +[[package]] +name = "mutually_exclusive_features" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e" + [[package]] name = "nanoid" version = "0.4.0" @@ -3817,6 +3924,27 @@ dependencies = [ "libc", ] +[[package]] +name = 
"num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "object" version = "0.32.1" @@ -3861,9 +3989,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.57" +version = "0.10.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" +checksum = "79a4c6c3a2b158f7f8f2a2fc5a969fa3a068df6fc9dbb4a43845436e3af7c800" dependencies = [ "bitflags 2.4.0", "cfg-if 1.0.0", @@ -3893,9 +4021,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.93" +version = "0.9.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +checksum = "3812c071ba60da8b5677cc12bcb1d42989a65553772897a7e0355545a819838f" dependencies = [ "cc", "libc", @@ -4350,6 +4478,16 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.10", +] + [[package]] name = "proc-macro-crate" version = "2.0.0" @@ -4643,6 +4781,36 @@ dependencies = [ "crossbeam-utils 0.8.16", ] +[[package]] +name = "rdkafka" +version = "0.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54f02a5a40220f8a2dfa47ddb38ba9064475a5807a69504b6f91711df2eea63" +dependencies = [ + "futures-channel", + "futures-util", + "libc", + "log", + "rdkafka-sys", + "serde", + "serde_derive", + "serde_json", + "slab", + "tokio 1.32.0", +] + +[[package]] +name = "rdkafka-sys" +version = "4.7.0+2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55e0d2f9ba6253f6ec72385e453294f8618e9e15c2c6aba2a5c01ccf9622d615" +dependencies = [ + "libc", + "libz-sys", + "num_enum", + "pkg-config", +] + [[package]] name = "redis-protocol" version = "4.1.0" @@ -4894,6 +5062,7 @@ dependencies = [ "actix-multipart", "actix-rt", "actix-web", + "analytics", "api_models", "argon2", "async-bb8-diesel", @@ -4943,6 +5112,7 @@ dependencies = [ "qrcode", "rand 0.8.5", "rand_chacha 0.3.1", + "rdkafka", "redis_interface", "regex", "reqwest", @@ -6668,11 +6838,12 @@ dependencies = [ [[package]] name = "tracing-actix-web" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a512ec11fae6c666707625e84f83e5d58f941e9ab15723289c0d380edfe48f09" +checksum = "1fe0d5feac3f4ca21ba33496bcb1ccab58cca6412b1405ae80f0581541e0ca78" dependencies = [ "actix-web", + "mutually_exclusive_features", "opentelemetry", "pin-project", "tracing", diff --git a/Dockerfile b/Dockerfile index 8eb321dd2afd..e9591e5e9f27 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:slim-bookworm as builder +FROM rust:bookworm as builder ARG EXTRA_FEATURES="" @@ -36,7 +36,7 @@ RUN cargo 
build --release --features release ${EXTRA_FEATURES} -FROM debian:bookworm-slim +FROM debian:bookworm # Placing config and binary executable in different directories ARG CONFIG_DIR=/local/config diff --git a/config/config.example.toml b/config/config.example.toml index 0b8730ca114a..d935a4e7f20d 100644 --- a/config/config.example.toml +++ b/config/config.example.toml @@ -322,9 +322,17 @@ region = "" # The AWS region used by the KMS SDK for decrypting data. # EmailClient configuration. Only applicable when the `email` feature flag is enabled. [email] -from_email = "notify@example.com" # Sender email -aws_region = "" # AWS region used by AWS SES -base_url = "" # Base url used when adding links that should redirect to self +sender_email = "example@example.com" # Sender email +aws_region = "" # AWS region used by AWS SES +base_url = "" # Base url used when adding links that should redirect to self +allowed_unverified_days = 1 # Number of days the api calls ( with jwt token ) can be made without verifying the email +active_email_client = "SES" # The currently active email client + +# Configuration for aws ses, applicable when the active email client is SES +[email.aws_ses] +email_role_arn = "" # The amazon resource name ( arn ) of the role which has permission to send emails +sts_role_session_name = "" # An identifier for the assumed role session, used to uniquely identify a session. + #tokenization configuration which describe token lifetime and payment method for specific connector [tokenization] @@ -427,9 +435,6 @@ credit = { currency = "USD" } debit = { currency = "USD" } ach = { currency = "USD" } -[pm_filters.stripe] -cashapp = { country = "US", currency = "USD" } - [pm_filters.prophetpay] card_redirect = { currency = "USD" } diff --git a/config/development.toml b/config/development.toml index bcf561dd5857..fa5fddb0d60a 100644 --- a/config/development.toml +++ b/config/development.toml @@ -212,9 +212,15 @@ disabled = false consumer_group = "SCHEDULER_GROUP" [email] -from_email = "notify@example.com" +sender_email = "example@example.com" aws_region = "" -base_url = "" +base_url = "http://localhost:8080" +allowed_unverified_days = 1 +active_email_client = "SES" + +[email.aws_ses] +email_role_arn = "" +sts_role_session_name = "" [bank_config.eps] stripe = { banks = "arzte_und_apotheker_bank,austrian_anadi_bank_ag,bank_austria,bankhaus_carl_spangler,bankhaus_schelhammer_und_schattera_ag,bawag_psk_ag,bks_bank_ag,brull_kallmus_bank_ag,btv_vier_lander_bank,capital_bank_grawe_gruppe_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_alpeadriabank_international_ag,hypo_noe_lb_fur_niederosterreich_u_wien,hypo_oberosterreich_salzburg_steiermark,hypo_tirol_bank_ag,hypo_vorarlberg_bank_ag,hypo_bank_burgenland_aktiengesellschaft,marchfelder_bank,oberbank_ag,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag,vr_bank_braunau" } @@ -469,3 +475,33 @@ delay_between_retries_in_milliseconds = 500 [kv_config] ttl = 900 # 15 * 60 seconds + +[events] +source = "logs" + +[events.kafka] +brokers = ["localhost:9092"] +intent_analytics_topic = "hyperswitch-payment-intent-events" +attempt_analytics_topic = "hyperswitch-payment-attempt-events" +refund_analytics_topic = "hyperswitch-refund-events" +api_logs_topic = "hyperswitch-api-log-events" +connector_events_topic = "hyperswitch-connector-api-events" + +[analytics] +source = "sqlx" + +[analytics.clickhouse] +username = "default" +# password = "" +host = "http://localhost:8123" +database_name = "default" 
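+
+# The [analytics.clickhouse] block above points at ClickHouse's HTTP interface
+# (port 8123); presumably it is read when `source` is switched from "sqlx" to
+# "clickhouse" (an assumption inferred from the section names here).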
+
+[analytics.sqlx]
+username = "db_user"
+password = "db_pass"
+host = "localhost"
+port = 5432
+dbname = "hyperswitch_db"
+pool_size = 5
+connection_timeout = 10
+queue_strategy = "Fifo"
\ No newline at end of file
diff --git a/config/docker_compose.toml b/config/docker_compose.toml
index 445e1e856846..4d50600e1bf8 100644
--- a/config/docker_compose.toml
+++ b/config/docker_compose.toml
@@ -333,16 +333,32 @@ supported_connectors = "braintree"
 redis_lock_expiry_seconds = 180 # 3 * 60 seconds
 delay_between_retries_in_milliseconds = 500
 
+[events.kafka]
+brokers = ["localhost:9092"]
+intent_analytics_topic = "hyperswitch-payment-intent-events"
+attempt_analytics_topic = "hyperswitch-payment-attempt-events"
+refund_analytics_topic = "hyperswitch-refund-events"
+api_logs_topic = "hyperswitch-api-log-events"
+connector_events_topic = "hyperswitch-connector-api-events"
+
 [analytics]
 source = "sqlx"
 
+[analytics.clickhouse]
+username = "default"
+# password = ""
+host = "http://localhost:8123"
+database_name = "default"
+
 [analytics.sqlx]
 username = "db_user"
 password = "db_pass"
-host = "pg"
+host = "localhost"
 port = 5432
 dbname = "hyperswitch_db"
 pool_size = 5
+connection_timeout = 10
+queue_strategy = "Fifo"
 
 [kv_config]
 ttl = 900 # 15 * 60 seconds
diff --git a/connector-template/transformers.rs b/connector-template/transformers.rs
index 3ed53a906a2e..bdbfb2e45672 100644
--- a/connector-template/transformers.rs
+++ b/connector-template/transformers.rs
@@ -130,6 +130,7 @@ impl TryFrom
+kafka-ui is a visual tool for inspecting Kafka, served on localhost:8090
+
+#### Setting up Clickhouse
+
+Once ClickHouse is up and running, you need to create the required tables for it.
+
+You can either visit the URL (http://localhost:8123/play) on which the clickhouse-server is listening to get a playground,
+or bash into the ClickHouse container and execute the commands manually:
+```
+# On your local terminal
+docker compose exec clickhouse-server bash
+
+# Inside the clickhouse-server container shell
+clickhouse-client --user default
+
+# Inside the clickhouse-client shell
+SHOW TABLES;
+CREATE TABLE ......
+```
+
+The table creation scripts are provided [here](./scripts).
+
+#### Running/Debugging your application
+Once set up, you can run the application either via docker compose or directly via cargo run.
+
+Remember to enable the kafka_events in the development.toml/docker_compose.toml files.
+
+Inspect the [kafka-ui](http://localhost:8090) to check the messages being inserted into the queue.
+
+If the messages/topics are available, you can run select queries on your ClickHouse tables to ensure data is being populated.
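+
+For example, a quick sanity check could look like this (a sketch: the exact table names come from the creation scripts you ran, so `payment_attempts` below is illustrative):
+
+```
+-- Count the rows consumed from Kafka so far
+SELECT count(*) FROM payment_attempts;
+
+-- Peek at the latest events to confirm fresh data is arriving
+SELECT * FROM payment_attempts ORDER BY created_at DESC LIMIT 5;
+```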
+
+If the data is not being populated in ClickHouse, you can check the error logs of the ClickHouse server via
+```
+# Inside the clickhouse-server container shell
+tail -f /var/log/clickhouse-server/clickhouse-server.err.log
+```
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/README.md b/crates/analytics/docs/clickhouse/cluster_setup/README.md
new file mode 100644
index 000000000000..cd5f2dfeb023
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/README.md
@@ -0,0 +1,347 @@
+# Tutorial for setting up a ClickHouse server
+
+
+## Single server with docker
+
+
+- Run server
+
+```
+docker run -d --name clickhouse-server -p 9000:9000 --ulimit nofile=262144:262144 yandex/clickhouse-server
+```
+
+- Run client
+
+```
+docker run -it --rm --link clickhouse-server:clickhouse-server yandex/clickhouse-client --host clickhouse-server
+```
+
+Now you can check whether the setup succeeded.
+
+
+## Setup Cluster
+
+
+In this part we will set up:
+
+- 1 cluster, with 3 shards
+- 2 replica servers per shard
+- ReplicatedMergeTree & Distributed tables on top of them
+
+
+### Cluster
+
+Let's look at our docker-compose.yml first.
+
+```
+version: '3'
+
+services:
+  clickhouse-zookeeper:
+    image: zookeeper
+    ports:
+      - "2181:2181"
+      - "2182:2182"
+    container_name: clickhouse-zookeeper
+    hostname: clickhouse-zookeeper
+
+  clickhouse-01:
+    image: yandex/clickhouse-server
+    hostname: clickhouse-01
+    container_name: clickhouse-01
+    ports:
+      - 9001:9000
+    volumes:
+      - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+      - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+      - ./config/macros/macros-01.xml:/etc/clickhouse-server/config.d/macros.xml
+      # - ./data/server-01:/var/lib/clickhouse
+    ulimits:
+      nofile:
+        soft: 262144
+        hard: 262144
+    depends_on:
+      - "clickhouse-zookeeper"
+
+  clickhouse-02:
+    image: yandex/clickhouse-server
+    hostname: clickhouse-02
+    container_name: clickhouse-02
+    ports:
+      - 9002:9000
+    volumes:
+      - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+      - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+      - ./config/macros/macros-02.xml:/etc/clickhouse-server/config.d/macros.xml
+      # - ./data/server-02:/var/lib/clickhouse
+    ulimits:
+      nofile:
+        soft: 262144
+        hard: 262144
+    depends_on:
+      - "clickhouse-zookeeper"
+
+  clickhouse-03:
+    image: yandex/clickhouse-server
+    hostname: clickhouse-03
+    container_name: clickhouse-03
+    ports:
+      - 9003:9000
+    volumes:
+      - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+      - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+      - ./config/macros/macros-03.xml:/etc/clickhouse-server/config.d/macros.xml
+      # - ./data/server-03:/var/lib/clickhouse
+    ulimits:
+      nofile:
+        soft: 262144
+        hard: 262144
+    depends_on:
+      - "clickhouse-zookeeper"
+
+  clickhouse-04:
+    image: yandex/clickhouse-server
+    hostname: clickhouse-04
+    container_name: clickhouse-04
+    ports:
+      - 9004:9000
+    volumes:
+      - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+      - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+      - ./config/macros/macros-04.xml:/etc/clickhouse-server/config.d/macros.xml
+      # - ./data/server-04:/var/lib/clickhouse
+    ulimits:
+      nofile:
+        soft: 262144
+        hard: 262144
+    depends_on:
+      - "clickhouse-zookeeper"
+
+  clickhouse-05:
+    image: yandex/clickhouse-server
+    hostname: clickhouse-05
+    container_name: clickhouse-05
+    ports:
+      - 9005:9000
+    volumes:
+      - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+      - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+      - ./config/macros/macros-05.xml:/etc/clickhouse-server/config.d/macros.xml
+      # - ./data/server-05:/var/lib/clickhouse
+    ulimits:
+      nofile:
+        soft: 262144
+        hard: 262144
+    depends_on:
+      - "clickhouse-zookeeper"
+
+  clickhouse-06:
+    image: yandex/clickhouse-server
+    hostname: clickhouse-06
+    container_name: clickhouse-06
+    ports:
+      - 9006:9000
+    volumes:
+      - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+      - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+      - ./config/macros/macros-06.xml:/etc/clickhouse-server/config.d/macros.xml
+      # - ./data/server-06:/var/lib/clickhouse
+    ulimits:
+      nofile:
+        soft: 262144
+        hard: 262144
+    depends_on:
+      - "clickhouse-zookeeper"
+
+networks:
+  default:
+    external:
+      name: clickhouse-net
+```
+
+
+We have 6 ClickHouse server containers and one ZooKeeper container.
+
+
+**To enable replication, ZooKeeper is required. ClickHouse will take care of data consistency on all replicas and run the restore procedure after failure automatically. It's recommended to deploy the ZooKeeper cluster on separate servers.**
+
+**ZooKeeper is not a strict requirement: in some simple cases you can duplicate the data by writing it into all the replicas from your application code. This approach is not recommended, because in this case ClickHouse is not able to guarantee data consistency on all replicas; that remains the responsibility of your application.**
+
+
+Let's look at the config files.
+
+`./config/clickhouse_config.xml` is the default config file in Docker; we copy it out and add this line:
+
+```
+<include_from>/etc/clickhouse-server/metrika.xml</include_from>
+```
+
+
+Now let's see `clickhouse_metrika.xml`:
+
+```
+<yandex>
+    <clickhouse_remote_servers>
+        <cluster_1>
+            <shard>
+                <weight>1</weight>
+                <internal_replication>true</internal_replication>
+                <replica>
+                    <host>clickhouse-01</host>
+                    <port>9000</port>
+                </replica>
+                <replica>
+                    <host>clickhouse-06</host>
+                    <port>9000</port>
+                </replica>
+            </shard>
+            <shard>
+                <weight>1</weight>
+                <internal_replication>true</internal_replication>
+                <replica>
+                    <host>clickhouse-02</host>
+                    <port>9000</port>
+                </replica>
+                <replica>
+                    <host>clickhouse-03</host>
+                    <port>9000</port>
+                </replica>
+            </shard>
+            <shard>
+                <weight>1</weight>
+                <internal_replication>true</internal_replication>
+                <replica>
+                    <host>clickhouse-04</host>
+                    <port>9000</port>
+                </replica>
+                <replica>
+                    <host>clickhouse-05</host>
+                    <port>9000</port>
+                </replica>
+            </shard>
+        </cluster_1>
+    </clickhouse_remote_servers>
+    <zookeeper-servers>
+        <node index="1">
+            <host>clickhouse-zookeeper</host>
+            <port>2181</port>
+        </node>
+    </zookeeper-servers>
+    <networks>
+        <ip>::/0</ip>
+    </networks>
+    <clickhouse_compression>
+        <case>
+            <min_part_size>10000000000</min_part_size>
+            <min_part_size_ratio>0.01</min_part_size_ratio>
+            <method>lz4</method>
+        </case>
+    </clickhouse_compression>
+</yandex>
+```
+
+and macros.xml; each instance has its own macros settings, like server 1:
+
+```
+<yandex>
+    <macros>
+        <replica>clickhouse-01</replica>
+        <shard>01</shard>
+        <layer>01</layer>
+    </macros>
+</yandex>
+```
+
+
+**Make sure your macros settings match the remote server settings in metrika.xml.**
+
+So now you can start the server:
+
+```
+docker network create clickhouse-net
+docker-compose up -d
+```
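+
+Before creating tables, it can be useful to confirm that each node picked up its macros and can reach ZooKeeper (a sketch; `system.macros` and `system.zookeeper` are built-in ClickHouse system tables):
+
+```sql
+-- Shows the values substituted for {layer}, {shard} and {replica} on this node
+SELECT * FROM system.macros;
+
+-- Errors here mean replication will not work
+SELECT name FROM system.zookeeper WHERE path = '/';
+```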
+
+Connect to the server and check that the cluster settings are in place:
+
+```
+docker run -it --rm --network="clickhouse-net" --link clickhouse-01:clickhouse-server yandex/clickhouse-client --host clickhouse-server
+```
+
+```sql
+clickhouse-01 :) select * from system.clusters;
+
+SELECT *
+FROM system.clusters
+
+┌─cluster─────────────────────┬─shard_num─┬─shard_weight─┬─replica_num─┬─host_name─────┬─host_address─┬─port─┬─is_local─┬─user────┬─default_database─┐
+│ cluster_1                   │         1 │            1 │           1 │ clickhouse-01 │ 172.21.0.4   │ 9000 │        1 │ default │                  │
+│ cluster_1                   │         1 │            1 │           2 │ clickhouse-06 │ 172.21.0.5   │ 9000 │        1 │ default │                  │
+│ cluster_1                   │         2 │            1 │           1 │ clickhouse-02 │ 172.21.0.8   │ 9000 │        0 │ default │                  │
+│ cluster_1                   │         2 │            1 │           2 │ clickhouse-03 │ 172.21.0.6   │ 9000 │        0 │ default │                  │
+│ cluster_1                   │         3 │            1 │           1 │ clickhouse-04 │ 172.21.0.7   │ 9000 │        0 │ default │                  │
+│ cluster_1                   │         3 │            1 │           2 │ clickhouse-05 │ 172.21.0.3   │ 9000 │        0 │ default │                  │
+│ test_shard_localhost        │         1 │            1 │           1 │ localhost     │ 127.0.0.1    │ 9000 │        1 │ default │                  │
+│ test_shard_localhost_secure │         1 │            1 │           1 │ localhost     │ 127.0.0.1    │ 9440 │        0 │ default │                  │
+└─────────────────────────────┴───────────┴──────────────┴─────────────┴───────────────┴──────────────┴──────┴──────────┴─────────┴──────────────────┘
+```
+
+If you see this, the cluster settings are loaded correctly (although this alone does not verify that the nodes can connect to each other).
+
+
+### Replica Table
+
+So now we have the cluster and replica settings. For ClickHouse, we need to create a ReplicatedMergeTree table as a local table on every server:
+
+```sql
+CREATE TABLE ttt (id Int32) ENGINE = ReplicatedMergeTree('/clickhouse/tables/{layer}-{shard}/ttt', '{replica}') PARTITION BY id ORDER BY id
+```
+
+and create a Distributed table that routes to the local tables:
+
+```sql
+CREATE TABLE ttt_all as ttt ENGINE = Distributed(cluster_1, default, ttt, rand());
+```
+
+
+### Insert and test
+
+Generate some data and test.
+
+```
+# docker exec into the client on server 1, then:
+for ((idx=1;idx<=100;++idx)); do clickhouse-client --host clickhouse-server --query "Insert into default.ttt_all values ($idx)"; done;
+```
+
+For the Distributed table:
+
+```
+select count(*) from ttt_all;
+```
+
+For the local table:
+ +``` +select count(*) from ttt; +``` + + +## Authentication + +Please see config/users.xml + + +- Conn +```bash +docker run -it --rm --network="clickhouse-net" --link clickhouse-01:clickhouse-server yandex/clickhouse-client --host clickhouse-server -u user1 --password 123456 +``` + +## Source + +- https://clickhouse.yandex/docs/en/operations/table_engines/replication/#creating-replicated-tables diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml new file mode 100644 index 000000000000..94c854dc273a --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml @@ -0,0 +1,370 @@ + + + + + error + 1000M + 1 + 10 + + + + 8123 + 9000 + + + + + + + + + /etc/clickhouse-server/server.crt + /etc/clickhouse-server/server.key + + /etc/clickhouse-server/dhparam.pem + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 9009 + + + + + + + + + + + + + + + + + + + + 4096 + 3 + + + 100 + + + + + + 8589934592 + + + 5368709120 + + + + /var/lib/clickhouse/ + + + /var/lib/clickhouse/tmp/ + + + /var/lib/clickhouse/user_files/ + + + users.xml + + + default + + + + + + default + + + + + + + + + + + + + + localhost + 9000 + + + + + + + localhost + 9440 + 1 + + + + + + + + /etc/clickhouse-server/metrika.xml + + + + + + + + + 3600 + + + + 3600 + + + 60 + + + + + + + + + + system + query_log
+ + toYYYYMM(event_date) + + 7500 +
+ + + + + + + + + + + + + + + + *_dictionary.xml + + + + + + + + + + /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + click_cost + any + + 0 + 3600 + + + 86400 + 60 + + + + max + + 0 + 60 + + + 3600 + 300 + + + 86400 + 3600 + + + + + + /var/lib/clickhouse/format_schemas/ + + + +
+ diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml new file mode 100644 index 000000000000..b58ffc34bc29 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml @@ -0,0 +1,60 @@ + + + + + 1 + true + + clickhouse-01 + 9000 + + + clickhouse-06 + 9000 + + + + 1 + true + + clickhouse-02 + 9000 + + + clickhouse-03 + 9000 + + + + 1 + true + + + clickhouse-04 + 9000 + + + clickhouse-05 + 9000 + + + + + + + clickhouse-zookeeper + 2181 + + + + ::/0 + + + + 10000000000 + 0.01 + lz4 + + + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml new file mode 100644 index 000000000000..75df1c5916e8 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml @@ -0,0 +1,9 @@ + + + clickhouse-01 + 01 + 01 + data + cluster_1 + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml new file mode 100644 index 000000000000..67e4a545b30c --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml @@ -0,0 +1,9 @@ + + + clickhouse-02 + 02 + 01 + data + cluster_1 + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml new file mode 100644 index 000000000000..e9278191b80f --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml @@ -0,0 +1,9 @@ + + + clickhouse-03 + 02 + 01 + data + cluster_1 + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml new file mode 100644 index 000000000000..033c0ad1152e --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml @@ -0,0 +1,9 @@ + + + clickhouse-04 + 03 + 01 + data + cluster_1 + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml new file mode 100644 index 000000000000..c63314c5acea --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml @@ -0,0 +1,9 @@ + + + clickhouse-05 + 03 + 01 + data + cluster_1 + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml new file mode 100644 index 000000000000..4b01bda9948c --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml @@ -0,0 +1,9 @@ + + + clickhouse-06 + 01 + 01 + data + cluster_1 + + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml new file mode 100644 index 000000000000..e1b8de78e37a --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml @@ -0,0 +1,117 @@ + + + + + + + + 10000000000 + + + 0 + + + random + + + + + 1 + + + + + + + 123456 + + ::/0 + + default + default + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + ::1 + 127.0.0.1 + + readonly + default + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git 
a/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml b/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml new file mode 100644 index 000000000000..96d7618b47e6 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml @@ -0,0 +1,198 @@ +version: '3' + +networks: + ckh_net: + +services: + clickhouse-zookeeper: + image: zookeeper + ports: + - "2181:2181" + - "2182:2182" + container_name: clickhouse-zookeeper + hostname: clickhouse-zookeeper + networks: + - ckh_net + + clickhouse-01: + image: clickhouse/clickhouse-server + hostname: clickhouse-01 + container_name: clickhouse-01 + networks: + - ckh_net + ports: + - 9001:9000 + - 8124:8123 + volumes: + - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml + - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml + - ./config/macros/macros-01.xml:/etc/clickhouse-server/config.d/macros.xml + - ./config/users.xml:/etc/clickhouse-server/users.xml + # - ./data/server-01:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + depends_on: + - "clickhouse-zookeeper" + + clickhouse-02: + image: clickhouse/clickhouse-server + hostname: clickhouse-02 + container_name: clickhouse-02 + networks: + - ckh_net + ports: + - 9002:9000 + - 8125:8123 + volumes: + - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml + - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml + - ./config/macros/macros-02.xml:/etc/clickhouse-server/config.d/macros.xml + - ./config/users.xml:/etc/clickhouse-server/users.xml + # - ./data/server-02:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + depends_on: + - "clickhouse-zookeeper" + + clickhouse-03: + image: clickhouse/clickhouse-server + hostname: clickhouse-03 + container_name: clickhouse-03 + networks: + - ckh_net + ports: + - 9003:9000 + - 8126:8123 + volumes: + - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml + - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml + - ./config/macros/macros-03.xml:/etc/clickhouse-server/config.d/macros.xml + - ./config/users.xml:/etc/clickhouse-server/users.xml + # - ./data/server-03:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + depends_on: + - "clickhouse-zookeeper" + + clickhouse-04: + image: clickhouse/clickhouse-server + hostname: clickhouse-04 + container_name: clickhouse-04 + networks: + - ckh_net + ports: + - 9004:9000 + - 8127:8123 + volumes: + - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml + - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml + - ./config/macros/macros-04.xml:/etc/clickhouse-server/config.d/macros.xml + - ./config/users.xml:/etc/clickhouse-server/users.xml + # - ./data/server-04:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + depends_on: + - "clickhouse-zookeeper" + + clickhouse-05: + image: clickhouse/clickhouse-server + hostname: clickhouse-05 + container_name: clickhouse-05 + networks: + - ckh_net + ports: + - 9005:9000 + - 8128:8123 + volumes: + - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml + - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml + - ./config/macros/macros-05.xml:/etc/clickhouse-server/config.d/macros.xml + - ./config/users.xml:/etc/clickhouse-server/users.xml + # - ./data/server-05:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + depends_on: + - "clickhouse-zookeeper" + + clickhouse-06: + image: 
clickhouse/clickhouse-server + hostname: clickhouse-06 + container_name: clickhouse-06 + networks: + - ckh_net + ports: + - 9006:9000 + - 8129:8123 + volumes: + - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml + - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml + - ./config/macros/macros-06.xml:/etc/clickhouse-server/config.d/macros.xml + - ./config/users.xml:/etc/clickhouse-server/users.xml + # - ./data/server-06:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + depends_on: + - "clickhouse-zookeeper" + + kafka0: + image: confluentinc/cp-kafka:7.0.5 + hostname: kafka0 + container_name: kafka0 + ports: + - 9092:9092 + - 9093 + - 9997 + - 29092 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092 + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_PROCESS_ROLES: 'broker,controller' + KAFKA_NODE_ID: 1 + KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093' + KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092' + KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' + KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' + JMX_PORT: 9997 + KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997 + volumes: + - ./kafka-script.sh:/tmp/update_run.sh + command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + networks: + ckh_net: + aliases: + - hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local + + + # Kafka UI for debugging kafka queues + kafka-ui: + container_name: kafka-ui + image: provectuslabs/kafka-ui:latest + ports: + - 8090:8080 + depends_on: + - kafka0 + networks: + - ckh_net + environment: + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 + KAFKA_CLUSTERS_0_JMXPORT: 9997 + diff --git a/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh b/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh new file mode 100755 index 000000000000..023c832b4e1b --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh @@ -0,0 +1,11 @@ +# This script is required to run kafka cluster (without zookeeper) +#!/bin/sh + +# Docker workaround: Remove check for KAFKA_ZOOKEEPER_CONNECT parameter +sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure + +# Docker workaround: Ignore cub zk-ready +sed -i 's/cub zk-ready/echo ignore zk-ready/' /etc/confluent/docker/ensure + +# KRaft required step: Format the storage directory with a new cluster ID +echo "kafka-storage format --ignore-formatted -t $(kafka-storage random-uuid) -c /etc/kafka/kafka.properties" >> /etc/confluent/docker/ensure \ No newline at end of file diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql new file mode 100644 index 000000000000..0fe194a0e676 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql @@ -0,0 +1,237 @@ +CREATE TABLE hyperswitch.api_events_queue on cluster '{cluster}' ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `request_id` String, + `flow_type` LowCardinality(String), + `api_name` LowCardinality(String), + `request` String, + `response` String, + `status_code` UInt32, + `url_path` LowCardinality(Nullable(String)), + `event_type` LowCardinality(Nullable(String)), + `created_at` DateTime CODEC(T64, LZ4), + `latency` Nullable(UInt128), + `user_agent` Nullable(String), + `ip_addr` Nullable(String) +) ENGINE = Kafka SETTINGS kafka_broker_list = 'hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local:9092', +kafka_topic_list = 'hyperswitch-api-log-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + + +CREATE TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `request_id` Nullable(String), + `flow_type` LowCardinality(String), + `api_name` LowCardinality(String), + `request` String, + `response` String, + `status_code` UInt32, + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `latency` Nullable(UInt128), + `user_agent` Nullable(String), + `ip_addr` Nullable(String), + 
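+    -- Bloom-filter skip indexes: queries filtering on these columns can skip
+    -- whole granules; GRANULARITY 1 builds one index entry per granule.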
INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1, + INDEX apiIndex api_name TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1 +) ENGINE = ReplicatedMergeTree( + '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/api_events_clustered', + '{replica}' +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, flow_type, status_code, api_name) +TTL created_at + toIntervalMonth(6) +; + + +CREATE TABLE hyperswitch.api_events_dist on cluster '{cluster}' ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `request_id` Nullable(String), + `flow_type` LowCardinality(String), + `api_name` LowCardinality(String), + `request` String, + `response` String, + `status_code` UInt32, + `url_path` LowCardinality(Nullable(String)), + `event_type` LowCardinality(Nullable(String)), + `inserted_at` DateTime64(3), + `created_at` DateTime64(3), + `latency` Nullable(UInt128), + `user_agent` Nullable(String), + `ip_addr` Nullable(String) +) ENGINE = Distributed('{cluster}', 'hyperswitch', 'api_events_clustered', rand()); + +CREATE MATERIALIZED VIEW hyperswitch.api_events_mv on cluster '{cluster}' TO hyperswitch.api_events_dist ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `request_id` Nullable(String), + `flow_type` LowCardinality(String), + `api_name` LowCardinality(String), + `request` String, + `response` String, + `status_code` UInt32, + `url_path` LowCardinality(Nullable(String)), + `event_type` LowCardinality(Nullable(String)), + `inserted_at` DateTime64(3), + `created_at` DateTime64(3), + `latency` Nullable(UInt128), + `user_agent` Nullable(String), + `ip_addr` Nullable(String) +) AS +SELECT + merchant_id, + payment_id, + refund_id, + payment_method_id, + payment_method, + payment_method_type, + customer_id, + user_id, + request_id, + flow_type, + api_name, + request, + response, + status_code, + url_path, + event_type, + now() as inserted_at, + created_at, + latency, + user_agent, + ip_addr +FROM + hyperswitch.api_events_queue +WHERE length(_error) = 0; + + +CREATE MATERIALIZED VIEW hyperswitch.api_events_parse_errors on cluster '{cluster}' +( + `topic` String, + `partition` Int64, + `offset` Int64, + `raw` String, + `error` String +) +ENGINE = MergeTree +ORDER BY (topic, partition, offset) +SETTINGS index_granularity = 8192 AS +SELECT + _topic AS topic, + _partition AS partition, + _offset AS offset, + _raw_message AS raw, + _error AS error +FROM hyperswitch.api_events_queue +WHERE length(_error) > 0 +; + + +ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `url_path` LowCardinality(Nullable(String)); +ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `event_type` LowCardinality(Nullable(String)); + + +CREATE TABLE hyperswitch.api_audit_log ON CLUSTER '{cluster}' ( + `merchant_id` LowCardinality(String), + `payment_id` String, + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `user_id` Nullable(String), + 
`request_id` Nullable(String), + `flow_type` LowCardinality(String), + `api_name` LowCardinality(String), + `request` String, + `response` String, + `status_code` UInt32, + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `latency` Nullable(UInt128), + `user_agent` Nullable(String), + `ip_addr` Nullable(String), + `url_path` LowCardinality(Nullable(String)), + `event_type` LowCardinality(Nullable(String)), + `customer_id` LowCardinality(Nullable(String)) +) ENGINE = ReplicatedMergeTree( '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/api_audit_log', '{replica}' ) PARTITION BY merchant_id +ORDER BY (merchant_id, payment_id) +TTL created_at + toIntervalMonth(18) +SETTINGS index_granularity = 8192 + + +CREATE MATERIALIZED VIEW hyperswitch.api_audit_log_mv ON CLUSTER `{cluster}` TO hyperswitch.api_audit_log( + `merchant_id` LowCardinality(String), + `payment_id` String, + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `request_id` Nullable(String), + `flow_type` LowCardinality(String), + `api_name` LowCardinality(String), + `request` String, + `response` String, + `status_code` UInt32, + `url_path` LowCardinality(Nullable(String)), + `event_type` LowCardinality(Nullable(String)), + `inserted_at` DateTime64(3), + `created_at` DateTime64(3), + `latency` Nullable(UInt128), + `user_agent` Nullable(String), + `ip_addr` Nullable(String) +) AS +SELECT + merchant_id, + multiIf(payment_id IS NULL, '', payment_id) AS payment_id, + refund_id, + payment_method_id, + payment_method, + payment_method_type, + customer_id, + user_id, + request_id, + flow_type, + api_name, + request, + response, + status_code, + url_path, + api_event_type AS event_type, + now() AS inserted_at, + created_at, + latency, + user_agent, + ip_addr +FROM hyperswitch.api_events_queue +WHERE length(_error) = 0 \ No newline at end of file diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql new file mode 100644 index 000000000000..3a6281ae9050 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql @@ -0,0 +1,217 @@ +CREATE TABLE hyperswitch.payment_attempt_queue on cluster '{cluster}' ( + `payment_id` String, + `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + `save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` LowCardinality(Nullable(String)), + `capture_on` Nullable(DateTime) CODEC(T64, LZ4), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime 
CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `modified_at` DateTime CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-payment-attempt-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + + +CREATE TABLE hyperswitch.payment_attempt_dist on cluster '{cluster}' ( + `payment_id` String, + `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + `save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` Nullable(String), + `capture_on` Nullable(DateTime) CODEC(T64, LZ4), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Distributed('{cluster}', 'hyperswitch', 'payment_attempt_clustered', cityHash64(attempt_id)); + + + +CREATE MATERIALIZED VIEW hyperswitch.payment_attempt_mv on cluster '{cluster}' TO hyperswitch.payment_attempt_dist ( + `payment_id` String, + `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + `save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` Nullable(String), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime64(3), + `capture_on` Nullable(DateTime64(3)), + `last_synced` Nullable(DateTime64(3)), + `modified_at` DateTime64(3), + `inserted_at` DateTime64(3), + `sign_flag` Int8 +) AS +SELECT + payment_id, + merchant_id, + attempt_id, + status, + amount, + currency, + connector, + save_to_locker, + error_message, + offer_amount, + surcharge_amount, + tax_amount, + payment_method_id, + payment_method, + payment_method_type, + connector_transaction_id, + capture_method, + confirm, + authentication_type, + cancellation_reason, + amount_to_capture, + mandate_id, + browser_info, + error_code, + connector_metadata, + 
payment_experience, + created_at, + capture_on, + last_synced, + modified_at, + now() as inserted_at, + sign_flag +FROM + hyperswitch.payment_attempt_queue +WHERE length(_error) = 0; + + +CREATE TABLE hyperswitch.payment_attempt_clustered on cluster '{cluster}' ( + `payment_id` String, + `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + `save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` Nullable(String), + `capture_on` Nullable(DateTime) CODEC(T64, LZ4), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8, + INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1, + INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1, + INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1, + INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex status TYPE bloom_filter GRANULARITY 1 +) ENGINE = ReplicatedCollapsingMergeTree( + '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/payment_attempt_clustered', + '{replica}', + sign_flag +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, attempt_id) +TTL created_at + toIntervalMonth(6) +; + +CREATE MATERIALIZED VIEW hyperswitch.payment_attempt_parse_errors on cluster '{cluster}' +( + `topic` String, + `partition` Int64, + `offset` Int64, + `raw` String, + `error` String +) +ENGINE = MergeTree +ORDER BY (topic, partition, offset) +SETTINGS index_granularity = 8192 AS +SELECT + _topic AS topic, + _partition AS partition, + _offset AS offset, + _raw_message AS raw, + _error AS error +FROM hyperswitch.payment_attempt_queue +WHERE length(_error) > 0 +; \ No newline at end of file diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql new file mode 100644 index 000000000000..eb2d83140e92 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql @@ -0,0 +1,165 @@ +CREATE TABLE hyperswitch.payment_intents_queue on cluster '{cluster}' ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + `statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` 
LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` String, + `business_label` String, + `modified_at` DateTime, + `created_at` DateTime, + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-payment-intent-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + +CREATE TABLE hyperswitch.payment_intents_dist on cluster '{cluster}' ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + `statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` LowCardinality(String), + `business_label` String, + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Distributed('{cluster}', 'hyperswitch', 'payment_intents_clustered', cityHash64(payment_id)); + +CREATE TABLE hyperswitch.payment_intents_clustered on cluster '{cluster}' ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + `statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` LowCardinality(String), + `business_label` String, + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8, + INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1, + INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex status TYPE bloom_filter GRANULARITY 1 +) ENGINE = ReplicatedCollapsingMergeTree( + '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/payment_intents_clustered', + '{replica}', + sign_flag +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, payment_id) +TTL created_at + toIntervalMonth(6) +; + +CREATE MATERIALIZED VIEW hyperswitch.payment_intent_mv on cluster '{cluster}' TO hyperswitch.payment_intents_dist ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + 
`statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` LowCardinality(String), + `business_label` String, + `modified_at` DateTime64(3), + `created_at` DateTime64(3), + `last_synced` Nullable(DateTime64(3)), + `inserted_at` DateTime64(3), + `sign_flag` Int8 +) AS +SELECT + payment_id, + merchant_id, + status, + amount, + currency, + amount_captured, + customer_id, + description, + return_url, + connector_id, + statement_descriptor_name, + statement_descriptor_suffix, + setup_future_usage, + off_session, + client_secret, + active_attempt_id, + business_country, + business_label, + modified_at, + created_at, + last_synced, + now() as inserted_at, + sign_flag +FROM hyperswitch.payment_intents_queue +WHERE length(_error) = 0; + +CREATE MATERIALIZED VIEW hyperswitch.payment_intent_parse_errors on cluster '{cluster}' +( + `topic` String, + `partition` Int64, + `offset` Int64, + `raw` String, + `error` String +) +ENGINE = MergeTree +ORDER BY (topic, partition, offset) +SETTINGS index_granularity = 8192 AS +SELECT + _topic AS topic, + _partition AS partition, + _offset AS offset, + _raw_message AS raw, + _error AS error +FROM hyperswitch.payment_intents_queue +WHERE length(_error) > 0 +; diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql new file mode 100644 index 000000000000..bf5f6e0e2405 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql @@ -0,0 +1,173 @@ +CREATE TABLE hyperswitch.refund_queue on cluster '{cluster}' ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` Nullable(String), + `created_at` DateTime, + `modified_at` DateTime, + `sign_flag` Int8 +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-refund-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + +CREATE TABLE hyperswitch.refund_dist on cluster '{cluster}' ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` 
Nullable(String), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Distributed('{cluster}', 'hyperswitch', 'refund_clustered', cityHash64(refund_id)); + + + +CREATE TABLE hyperswitch.refund_clustered on cluster '{cluster}' ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` Nullable(String), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8, + INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1, + INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1, + INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1 +) ENGINE = ReplicatedCollapsingMergeTree( + '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/refund_clustered', + '{replica}', + sign_flag +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, refund_id) +TTL created_at + toIntervalMonth(6) +; + +CREATE MATERIALIZED VIEW hyperswitch.kafka_parse_refund on cluster '{cluster}' TO hyperswitch.refund_dist ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` Nullable(String), + `created_at` DateTime64(3), + `modified_at` DateTime64(3), + `inserted_at` DateTime64(3), + `sign_flag` Int8 +) AS +SELECT + internal_reference_id, + refund_id, + payment_id, + merchant_id, + connector_transaction_id, + connector, + connector_refund_id, + external_reference_id, + refund_type, + total_amount, + currency, + refund_amount, + refund_status, + sent_to_gateway, + refund_error_message, + refund_arn, + attempt_id, + description, + refund_reason, + refund_error_code, + created_at, + modified_at, + now() as inserted_at, + sign_flag +FROM hyperswitch.refund_queue +WHERE length(_error) = 0; + +CREATE MATERIALIZED VIEW hyperswitch.refund_parse_errors on cluster '{cluster}' +( + `topic` String, + `partition` Int64, + `offset` Int64, + `raw` String, + `error` String +) +ENGINE = MergeTree +ORDER BY (topic, partition, offset) +SETTINGS index_granularity = 8192 AS +SELECT + _topic AS topic, + _partition AS partition, 
+ _offset AS offset, + _raw_message AS raw, + _error AS error +FROM hyperswitch.refund_queue +WHERE length(_error) > 0 +; \ No newline at end of file diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql new file mode 100644 index 000000000000..37766392bc70 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql @@ -0,0 +1,156 @@ +CREATE TABLE hyperswitch.sdk_events_queue on cluster '{cluster}' ( + `payment_id` Nullable(String), + `merchant_id` String, + `remote_ip` Nullable(String), + `log_type` LowCardinality(Nullable(String)), + `event_name` LowCardinality(Nullable(String)), + `first_event` LowCardinality(Nullable(String)), + `latency` Nullable(UInt32), + `timestamp` String, + `browser_name` LowCardinality(Nullable(String)), + `browser_version` Nullable(String), + `platform` LowCardinality(Nullable(String)), + `source` LowCardinality(Nullable(String)), + `category` LowCardinality(Nullable(String)), + `version` LowCardinality(Nullable(String)), + `value` Nullable(String), + `component` LowCardinality(Nullable(String)), + `payment_method` LowCardinality(Nullable(String)), + `payment_experience` LowCardinality(Nullable(String)) +) ENGINE = Kafka SETTINGS + kafka_broker_list = 'hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local:9092', + kafka_topic_list = 'hyper-sdk-logs', + kafka_group_name = 'hyper-c1', + kafka_format = 'JSONEachRow', + kafka_handle_error_mode = 'stream'; + +CREATE TABLE hyperswitch.sdk_events_clustered on cluster '{cluster}' ( + `payment_id` Nullable(String), + `merchant_id` String, + `remote_ip` Nullable(String), + `log_type` LowCardinality(Nullable(String)), + `event_name` LowCardinality(Nullable(String)), + `first_event` Bool DEFAULT 1, + `browser_name` LowCardinality(Nullable(String)), + `browser_version` Nullable(String), + `platform` LowCardinality(Nullable(String)), + `source` LowCardinality(Nullable(String)), + `category` LowCardinality(Nullable(String)), + `version` LowCardinality(Nullable(String)), + `value` Nullable(String), + `component` LowCardinality(Nullable(String)), + `payment_method` LowCardinality(Nullable(String)), + `payment_experience` LowCardinality(Nullable(String)) DEFAULT '', + `created_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4), + `inserted_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4), + `latency` Nullable(UInt32) DEFAULT 0, + INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1, + INDEX eventIndex event_name TYPE bloom_filter GRANULARITY 1, + INDEX platformIndex platform TYPE bloom_filter GRANULARITY 1, + INDEX logTypeIndex log_type TYPE bloom_filter GRANULARITY 1, + INDEX categoryIndex category TYPE bloom_filter GRANULARITY 1, + INDEX sourceIndex source TYPE bloom_filter GRANULARITY 1, + INDEX componentIndex component TYPE bloom_filter GRANULARITY 1, + INDEX firstEventIndex first_event TYPE bloom_filter GRANULARITY 1 +) ENGINE = ReplicatedMergeTree( + '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/sdk_events_clustered', '{replica}' +) +PARTITION BY + toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id) +TTL + toDateTime(created_at) + toIntervalMonth(6) +SETTINGS + index_granularity = 8192 +; + +CREATE TABLE hyperswitch.sdk_events_dist on cluster '{cluster}' ( + `payment_id` Nullable(String), + `merchant_id` String, + `remote_ip` Nullable(String), + `log_type` LowCardinality(Nullable(String)), + `event_name` LowCardinality(Nullable(String)), + 
`first_event` Bool DEFAULT 1, + `browser_name` LowCardinality(Nullable(String)), + `browser_version` Nullable(String), + `platform` LowCardinality(Nullable(String)), + `source` LowCardinality(Nullable(String)), + `category` LowCardinality(Nullable(String)), + `version` LowCardinality(Nullable(String)), + `value` Nullable(String), + `component` LowCardinality(Nullable(String)), + `payment_method` LowCardinality(Nullable(String)), + `payment_experience` LowCardinality(Nullable(String)) DEFAULT '', + `created_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4), + `inserted_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4), + `latency` Nullable(UInt32) DEFAULT 0 +) ENGINE = Distributed( + '{cluster}', 'hyperswitch', 'sdk_events_clustered', rand() +); + +CREATE MATERIALIZED VIEW hyperswitch.sdk_events_mv on cluster '{cluster}' TO hyperswitch.sdk_events_dist ( + `payment_id` Nullable(String), + `merchant_id` String, + `remote_ip` Nullable(String), + `log_type` LowCardinality(Nullable(String)), + `event_name` LowCardinality(Nullable(String)), + `first_event` Bool, + `latency` Nullable(UInt32), + `browser_name` LowCardinality(Nullable(String)), + `browser_version` Nullable(String), + `platform` LowCardinality(Nullable(String)), + `source` LowCardinality(Nullable(String)), + `category` LowCardinality(Nullable(String)), + `version` LowCardinality(Nullable(String)), + `value` Nullable(String), + `component` LowCardinality(Nullable(String)), + `payment_method` LowCardinality(Nullable(String)), + `payment_experience` LowCardinality(Nullable(String)), + `created_at` DateTime64(3) +) AS +SELECT + payment_id, + merchant_id, + remote_ip, + log_type, + event_name, + multiIf(first_event = 'true', 1, 0) AS first_event, + latency, + browser_name, + browser_version, + platform, + source, + category, + version, + value, + component, + payment_method, + payment_experience, + toDateTime64(timestamp, 3) AS created_at +FROM + hyperswitch.sdk_events_queue +WHERE length(_error) = 0 +; + +CREATE MATERIALIZED VIEW hyperswitch.sdk_parse_errors on cluster '{cluster}' ( + `topic` String, + `partition` Int64, + `offset` Int64, + `raw` String, + `error` String +) ENGINE = MergeTree + ORDER BY (topic, partition, offset) +SETTINGS + index_granularity = 8192 AS +SELECT + _topic AS topic, + _partition AS partition, + _offset AS offset, + _raw_message AS raw, + _error AS error +FROM + hyperswitch.sdk_events_queue +WHERE + length(_error) > 0 +; diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql new file mode 100644 index 000000000000..202b94ac6040 --- /dev/null +++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql @@ -0,0 +1 @@ +create database hyperswitch on cluster '{cluster}'; \ No newline at end of file diff --git a/crates/analytics/docs/clickhouse/scripts/api_events_v2.sql b/crates/analytics/docs/clickhouse/scripts/api_events_v2.sql new file mode 100644 index 000000000000..b41a75fe67e5 --- /dev/null +++ b/crates/analytics/docs/clickhouse/scripts/api_events_v2.sql @@ -0,0 +1,134 @@ +CREATE TABLE api_events_v2_queue ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `connector` Nullable(String), + `request_id` String, + `flow_type` LowCardinality(String), + `api_flow` 
LowCardinality(String), + `api_auth_type` LowCardinality(String), + `request` String, + `response` Nullable(String), + `authentication_data` Nullable(String), + `status_code` UInt32, + `created_at` DateTime CODEC(T64, LZ4), + `latency` UInt128, + `user_agent` String, + `ip_addr` String, +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-api-log-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + + +CREATE TABLE api_events_v2_dist ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `connector` Nullable(String), + `request_id` String, + `flow_type` LowCardinality(String), + `api_flow` LowCardinality(String), + `api_auth_type` LowCardinality(String), + `request` String, + `response` Nullable(String), + `authentication_data` Nullable(String), + `status_code` UInt32, + `created_at` DateTime CODEC(T64, LZ4), + `inserted_at` DateTime CODEC(T64, LZ4), + `latency` UInt128, + `user_agent` String, + `ip_addr` String, + INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1, + INDEX apiIndex api_flow TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1 +) ENGINE = MergeTree +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, flow_type, status_code, api_flow) +TTL created_at + toIntervalMonth(6) +; + +CREATE MATERIALIZED VIEW api_events_v2_mv TO api_events_v2_dist ( + `merchant_id` String, + `payment_id` Nullable(String), + `refund_id` Nullable(String), + `payment_method_id` Nullable(String), + `payment_method` Nullable(String), + `payment_method_type` Nullable(String), + `customer_id` Nullable(String), + `user_id` Nullable(String), + `connector` Nullable(String), + `request_id` String, + `flow_type` LowCardinality(String), + `api_flow` LowCardinality(String), + `api_auth_type` LowCardinality(String), + `request` String, + `response` Nullable(String), + `authentication_data` Nullable(String), + `status_code` UInt32, + `created_at` DateTime CODEC(T64, LZ4), + `inserted_at` DateTime CODEC(T64, LZ4), + `latency` UInt128, + `user_agent` String, + `ip_addr` String +) AS +SELECT + merchant_id, + payment_id, + refund_id, + payment_method_id, + payment_method, + payment_method_type, + customer_id, + user_id, + connector, + request_id, + flow_type, + api_flow, + api_auth_type, + request, + response, + authentication_data, + status_code, + created_at, + now() as inserted_at, + latency, + user_agent, + ip_addr +FROM + api_events_v2_queue +where length(_error) = 0; + + +CREATE MATERIALIZED VIEW api_events_parse_errors +( + `topic` String, + `partition` Int64, + `offset` Int64, + `raw` String, + `error` String +) +ENGINE = MergeTree +ORDER BY (topic, partition, offset) +SETTINGS index_granularity = 8192 AS +SELECT + _topic AS topic, + _partition AS partition, + _offset AS offset, + _raw_message AS raw, + _error AS error +FROM api_events_v2_queue +WHERE length(_error) > 0 +; diff --git a/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql b/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql new file mode 100644 index 000000000000..276e311e57a9 --- /dev/null +++ b/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql @@ -0,0 +1,156 @@ +CREATE TABLE payment_attempts_queue ( + `payment_id` String, 
+ `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + `save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` LowCardinality(Nullable(String)), + `capture_on` Nullable(DateTime) CODEC(T64, LZ4), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `modified_at` DateTime CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-payment-attempt-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + +CREATE TABLE payment_attempt_dist ( + `payment_id` String, + `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + `save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` Nullable(String), + `capture_on` Nullable(DateTime) CODEC(T64, LZ4), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8, + INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1, + INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1, + INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1, + INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex status TYPE bloom_filter GRANULARITY 1 +) ENGINE = CollapsingMergeTree( + sign_flag +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, attempt_id) +TTL created_at + toIntervalMonth(6) +; + + +CREATE MATERIALIZED VIEW kafka_parse_pa TO payment_attempt_dist ( + `payment_id` String, + `merchant_id` String, + `attempt_id` String, + `status` LowCardinality(String), + `amount` Nullable(UInt32), + `currency` LowCardinality(Nullable(String)), + `connector` LowCardinality(Nullable(String)), + 
`save_to_locker` Nullable(Bool), + `error_message` Nullable(String), + `offer_amount` Nullable(UInt32), + `surcharge_amount` Nullable(UInt32), + `tax_amount` Nullable(UInt32), + `payment_method_id` Nullable(String), + `payment_method` LowCardinality(Nullable(String)), + `payment_method_type` LowCardinality(Nullable(String)), + `connector_transaction_id` Nullable(String), + `capture_method` Nullable(String), + `confirm` Bool, + `authentication_type` LowCardinality(Nullable(String)), + `cancellation_reason` Nullable(String), + `amount_to_capture` Nullable(UInt32), + `mandate_id` Nullable(String), + `browser_info` Nullable(String), + `error_code` Nullable(String), + `connector_metadata` Nullable(String), + `payment_experience` Nullable(String), + `created_at` DateTime64(3), + `capture_on` Nullable(DateTime64(3)), + `last_synced` Nullable(DateTime64(3)), + `modified_at` DateTime64(3), + `inserted_at` DateTime64(3), + `sign_flag` Int8 +) AS +SELECT + payment_id, + merchant_id, + attempt_id, + status, + amount, + currency, + connector, + save_to_locker, + error_message, + offer_amount, + surcharge_amount, + tax_amount, + payment_method_id, + payment_method, + payment_method_type, + connector_transaction_id, + capture_method, + confirm, + authentication_type, + cancellation_reason, + amount_to_capture, + mandate_id, + browser_info, + error_code, + connector_metadata, + payment_experience, + created_at, + capture_on, + last_synced, + modified_at, + now() as inserted_at, + sign_flag +FROM + payment_attempts_queue; + diff --git a/crates/analytics/docs/clickhouse/scripts/payment_intents.sql b/crates/analytics/docs/clickhouse/scripts/payment_intents.sql new file mode 100644 index 000000000000..8cd487f364b4 --- /dev/null +++ b/crates/analytics/docs/clickhouse/scripts/payment_intents.sql @@ -0,0 +1,116 @@ +CREATE TABLE payment_intents_queue ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + `statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` String, + `business_label` String, + `modified_at` DateTime CODEC(T64, LZ4), + `created_at` DateTime CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-payment-intent-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + + +CREATE TABLE payment_intents_dist ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + `statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` 
LowCardinality(String), + `business_label` String, + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `last_synced` Nullable(DateTime) CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8, + INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1, + INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex status TYPE bloom_filter GRANULARITY 1 +) ENGINE = CollapsingMergeTree( + sign_flag +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, payment_id) +TTL created_at + toIntervalMonth(6) +; + +CREATE MATERIALIZED VIEW kafka_parse_payment_intent TO payment_intents_dist ( + `payment_id` String, + `merchant_id` String, + `status` LowCardinality(String), + `amount` UInt32, + `currency` LowCardinality(Nullable(String)), + `amount_captured` Nullable(UInt32), + `customer_id` Nullable(String), + `description` Nullable(String), + `return_url` Nullable(String), + `connector_id` LowCardinality(Nullable(String)), + `statement_descriptor_name` Nullable(String), + `statement_descriptor_suffix` Nullable(String), + `setup_future_usage` LowCardinality(Nullable(String)), + `off_session` Nullable(Bool), + `client_secret` Nullable(String), + `active_attempt_id` String, + `business_country` LowCardinality(String), + `business_label` String, + `modified_at` DateTime64(3), + `created_at` DateTime64(3), + `last_synced` Nullable(DateTime64(3)), + `inserted_at` DateTime64(3), + `sign_flag` Int8 +) AS +SELECT + payment_id, + merchant_id, + status, + amount, + currency, + amount_captured, + customer_id, + description, + return_url, + connector_id, + statement_descriptor_name, + statement_descriptor_suffix, + setup_future_usage, + off_session, + client_secret, + active_attempt_id, + business_country, + business_label, + modified_at, + created_at, + last_synced, + now() as inserted_at, + sign_flag +FROM payment_intents_queue; diff --git a/crates/analytics/docs/clickhouse/scripts/refunds.sql b/crates/analytics/docs/clickhouse/scripts/refunds.sql new file mode 100644 index 000000000000..a131270c1326 --- /dev/null +++ b/crates/analytics/docs/clickhouse/scripts/refunds.sql @@ -0,0 +1,121 @@ +CREATE TABLE refund_queue ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` Nullable(String), + `created_at` DateTime CODEC(T64, LZ4), + `modified_at` DateTime CODEC(T64, LZ4), + `sign_flag` Int8 +) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092', +kafka_topic_list = 'hyperswitch-refund-events', +kafka_group_name = 'hyper-c1', +kafka_format = 'JSONEachRow', +kafka_handle_error_mode = 'stream'; + + +CREATE TABLE refund_dist ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` 
Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` Nullable(String), + `created_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), + `sign_flag` Int8, + INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1, + INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1, + INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1, + INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1 +) ENGINE = CollapsingMergeTree( + sign_flag +) +PARTITION BY toStartOfDay(created_at) +ORDER BY + (created_at, merchant_id, refund_id) +TTL created_at + toIntervalMonth(6) +; + +CREATE MATERIALIZED VIEW kafka_parse_refund TO refund_dist ( + `internal_reference_id` String, + `refund_id` String, + `payment_id` String, + `merchant_id` String, + `connector_transaction_id` String, + `connector` LowCardinality(Nullable(String)), + `connector_refund_id` Nullable(String), + `external_reference_id` Nullable(String), + `refund_type` LowCardinality(String), + `total_amount` Nullable(UInt32), + `currency` LowCardinality(String), + `refund_amount` Nullable(UInt32), + `refund_status` LowCardinality(String), + `sent_to_gateway` Bool, + `refund_error_message` Nullable(String), + `refund_arn` Nullable(String), + `attempt_id` String, + `description` Nullable(String), + `refund_reason` Nullable(String), + `refund_error_code` Nullable(String), + `created_at` DateTime64(3), + `modified_at` DateTime64(3), + `inserted_at` DateTime64(3), + `sign_flag` Int8 +) AS +SELECT + internal_reference_id, + refund_id, + payment_id, + merchant_id, + connector_transaction_id, + connector, + connector_refund_id, + external_reference_id, + refund_type, + total_amount, + currency, + refund_amount, + refund_status, + sent_to_gateway, + refund_error_message, + refund_arn, + attempt_id, + description, + refund_reason, + refund_error_code, + created_at, + modified_at, + now() as inserted_at, + sign_flag +FROM refund_queue; diff --git a/crates/analytics/src/api_event.rs b/crates/analytics/src/api_event.rs new file mode 100644 index 000000000000..113344d47254 --- /dev/null +++ b/crates/analytics/src/api_event.rs @@ -0,0 +1,9 @@ +mod core; +pub mod events; +pub mod filters; +pub mod metrics; +pub mod types; + +pub trait APIEventAnalytics: events::ApiLogsFilterAnalytics {} + +pub use self::core::{api_events_core, get_api_event_metrics, get_filters}; diff --git a/crates/analytics/src/api_event/core.rs b/crates/analytics/src/api_event/core.rs new file mode 100644 index 000000000000..b368d6374f75 --- /dev/null +++ b/crates/analytics/src/api_event/core.rs @@ -0,0 +1,176 @@ +use std::collections::HashMap; + +use api_models::analytics::{ + api_event::{ + ApiEventMetricsBucketIdentifier, ApiEventMetricsBucketValue, ApiLogsRequest, + ApiMetricsBucketResponse, + }, + AnalyticsMetadata, ApiEventFiltersResponse, GetApiEventFiltersRequest, + GetApiEventMetricRequest, MetricsResponse, +}; +use error_stack::{IntoReport, ResultExt}; +use router_env::{ + instrument, logger, + tracing::{self, Instrument}, +}; + +use super::{ + 
events::{get_api_event, ApiLogsResult},
+    metrics::ApiEventMetricRow,
+};
+use crate::{
+    errors::{AnalyticsError, AnalyticsResult},
+    metrics,
+    types::FiltersError,
+    AnalyticsProvider,
+};
+
+#[instrument(skip_all)]
+pub async fn api_events_core(
+    pool: &AnalyticsProvider,
+    req: ApiLogsRequest,
+    merchant_id: String,
+) -> AnalyticsResult<Vec<ApiLogsResult>> {
+    let data = match pool {
+        AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented)
+            .into_report()
+            .attach_printable("SQL Analytics is not implemented for API Events"),
+        AnalyticsProvider::Clickhouse(pool) => get_api_event(&merchant_id, req, pool).await,
+        AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
+        | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
+            get_api_event(&merchant_id, req, ckh_pool).await
+        }
+    }
+    .change_context(AnalyticsError::UnknownError)?;
+    Ok(data)
+}
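+
+/// Lists the distinct values observed for each requested API-event dimension.
+/// API events are stored only in Clickhouse, so the SQLx-only provider
+/// reports `NotImplemented` and the combined providers always route this
+/// lookup to their Clickhouse pool.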
+pub async fn get_filters(
+    pool: &AnalyticsProvider,
+    req: GetApiEventFiltersRequest,
+    merchant_id: String,
+) -> AnalyticsResult<ApiEventFiltersResponse> {
+    use api_models::analytics::{api_event::ApiEventDimensions, ApiEventFilterValue};
+
+    use super::filters::get_api_event_filter_for_dimension;
+    use crate::api_event::filters::ApiEventFilter;
+
+    let mut res = ApiEventFiltersResponse::default();
+    for dim in req.group_by_names {
+        let values = match pool {
+            AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented)
+                .into_report()
+                .attach_printable("SQL Analytics is not implemented for API Events"),
+            AnalyticsProvider::Clickhouse(ckh_pool)
+            | AnalyticsProvider::CombinedSqlx(_, ckh_pool)
+            | AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
+                get_api_event_filter_for_dimension(dim, &merchant_id, &req.time_range, ckh_pool)
+                    .await
+            }
+        }
+        .change_context(AnalyticsError::UnknownError)?
+        .into_iter()
+        .filter_map(|fil: ApiEventFilter| match dim {
+            ApiEventDimensions::StatusCode => fil.status_code.map(|i| i.to_string()),
+            ApiEventDimensions::FlowType => fil.flow_type,
+            ApiEventDimensions::ApiFlow => fil.api_flow,
+        })
+        .collect::<Vec<String>>();
+        res.query_data.push(ApiEventFilterValue {
+            dimension: dim,
+            values,
+        })
+    }
+
+    Ok(res)
+}
+
+#[instrument(skip_all)]
+pub async fn get_api_event_metrics(
+    pool: &AnalyticsProvider,
+    merchant_id: &str,
+    req: GetApiEventMetricRequest,
+) -> AnalyticsResult<MetricsResponse<ApiMetricsBucketResponse>> {
+    let mut metrics_accumulator: HashMap<ApiEventMetricsBucketIdentifier, ApiEventMetricRow> =
+        HashMap::new();
+
+    let mut set = tokio::task::JoinSet::new();
+    for metric_type in req.metrics.iter().cloned() {
+        let req = req.clone();
+        let pool = pool.clone();
+        let task_span = tracing::debug_span!(
+            "analytics_api_metrics_query",
+            api_event_metric = metric_type.as_ref()
+        );
+
+        // TODO: lifetime issues with joinset,
+        // can be optimized away if joinset lifetime requirements are relaxed
+        let merchant_id_scoped = merchant_id.to_owned();
+        set.spawn(
+            async move {
+                let data = pool
+                    .get_api_event_metrics(
+                        &metric_type,
+                        &req.group_by_names.clone(),
+                        &merchant_id_scoped,
+                        &req.filters,
+                        &req.time_series.map(|t| t.granularity),
+                        &req.time_range,
+                    )
+                    .await
+                    .change_context(AnalyticsError::UnknownError);
+                (metric_type, data)
+            }
+            .instrument(task_span),
+        );
+    }
+
+    while let Some((metric, data)) = set
+        .join_next()
+        .await
+        .transpose()
+        .into_report()
+        .change_context(AnalyticsError::UnknownError)?
+    {
+        let data = data?;
+        let attributes = &[
+            metrics::request::add_attributes("metric_type", metric.to_string()),
+            metrics::request::add_attributes("source", pool.to_string()),
+        ];
+
+        let value = u64::try_from(data.len());
+        if let Ok(val) = value {
+            metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes);
+            logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
+        }
+        for (id, value) in data {
+            metrics_accumulator
+                .entry(id)
+                .and_modify(|data| {
+                    data.api_count = data.api_count.or(value.api_count);
+                    data.status_code_count = data.status_code_count.or(value.status_code_count);
+                    data.latency = data.latency.or(value.latency);
+                })
+                .or_insert(value);
+        }
+    }
+
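+    // Each metric task returns rows with only its own field populated, so
+    // rows sharing a bucket identifier are merged field-by-field above and
+    // flattened into one response bucket per identifier below.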
+    let query_data: Vec<ApiMetricsBucketResponse> = metrics_accumulator
+        .into_iter()
+        .map(|(id, val)| ApiMetricsBucketResponse {
+            values: ApiEventMetricsBucketValue {
+                latency: val.latency,
+                api_count: val.api_count,
+                status_code_count: val.status_code_count,
+            },
+            dimensions: id,
+        })
+        .collect();
+
+    Ok(MetricsResponse {
+        query_data,
+        meta_data: [AnalyticsMetadata {
+            current_time_range: req.time_range,
+        }],
+    })
+}
diff --git a/crates/analytics/src/api_event/events.rs b/crates/analytics/src/api_event/events.rs
new file mode 100644
index 000000000000..73b3fb9cbad2
--- /dev/null
+++ b/crates/analytics/src/api_event/events.rs
@@ -0,0 +1,105 @@
+use api_models::analytics::{
+    api_event::{ApiLogsRequest, QueryType},
+    Granularity,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use router_env::Flow;
+use time::PrimitiveDateTime;
+
+use crate::{
+    query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
+    types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
+};
+pub trait ApiLogsFilterAnalytics: LoadRow<ApiLogsResult> {}
+
+pub async fn get_api_event<T>(
+    merchant_id: &String,
+    query_param: ApiLogsRequest,
+    pool: &T,
+) -> FiltersResult<Vec<ApiLogsResult>>
+where
+    T: AnalyticsDataSource + ApiLogsFilterAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+    query_builder.add_select_column("*").switch()?;
+
+    query_builder
+        .add_filter_clause("merchant_id", merchant_id)
+        .switch()?;
+    match query_param.query_param {
+        QueryType::Payment { payment_id } => query_builder
+            .add_filter_clause("payment_id", payment_id)
+            .switch()?,
+        QueryType::Refund {
+            payment_id,
+            refund_id,
+        } => {
+            query_builder
+                .add_filter_clause("payment_id", payment_id)
+                .switch()?;
+            query_builder
+                .add_filter_clause("refund_id", refund_id)
+                .switch()?;
+        }
+    }
+    if let Some(list_api_name) = query_param.api_name_filter {
+        query_builder
+            .add_filter_in_range_clause("api_flow", &list_api_name)
+            .switch()?;
+    } else {
+        query_builder
+            .add_filter_in_range_clause(
+                "api_flow",
+                &[
+                    Flow::PaymentsCancel,
+                    Flow::PaymentsCapture,
+                    Flow::PaymentsConfirm,
+                    Flow::PaymentsCreate,
+                    Flow::PaymentsStart,
+                    Flow::PaymentsUpdate,
+                    Flow::RefundsCreate,
+                    Flow::IncomingWebhookReceive,
+                ],
+            )
+            .switch()?;
+    }
+    //TODO!: update the execute_query function to return reports instead of plain errors...
+    query_builder
+        .execute_query::<ApiLogsResult, _>(pool)
+        .await
+        .change_context(FiltersError::QueryBuildingError)?
+        .change_context(FiltersError::QueryExecutionFailure)
+}
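+// Row shape for the `SELECT *` query above; `Option` fields correspond to
+// columns that may be absent or NULL in the Clickhouse api_events data.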
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+pub struct ApiLogsResult {
+    pub merchant_id: String,
+    pub payment_id: Option<String>,
+    pub refund_id: Option<String>,
+    pub payment_method_id: Option<String>,
+    pub payment_method: Option<String>,
+    pub payment_method_type: Option<String>,
+    pub customer_id: Option<String>,
+    pub user_id: Option<String>,
+    pub connector: Option<String>,
+    pub request_id: Option<String>,
+    pub flow_type: String,
+    pub api_flow: String,
+    pub api_auth_type: Option<String>,
+    pub request: String,
+    pub response: Option<String>,
+    pub error: Option<String>,
+    pub authentication_data: Option<String>,
+    pub status_code: u16,
+    pub latency: Option<u128>,
+    pub user_agent: Option<String>,
+    pub hs_latency: Option<u128>,
+    pub ip_addr: Option<String>,
+    #[serde(with = "common_utils::custom_serde::iso8601")]
+    pub created_at: PrimitiveDateTime,
+}
diff --git a/crates/analytics/src/api_event/filters.rs b/crates/analytics/src/api_event/filters.rs
new file mode 100644
index 000000000000..87414ebad4ba
--- /dev/null
+++ b/crates/analytics/src/api_event/filters.rs
@@ -0,0 +1,53 @@
+use api_models::analytics::{api_event::ApiEventDimensions, Granularity, TimeRange};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use crate::{
+    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
+    types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
+};
+
+pub trait ApiEventFilterAnalytics: LoadRow<ApiEventFilter> {}
+
+pub async fn get_api_event_filter_for_dimension<T>(
+    dimension: ApiEventDimensions,
+    merchant_id: &String,
+    time_range: &TimeRange,
+    pool: &T,
+) -> FiltersResult<Vec<ApiEventFilter>>
+where
+    T: AnalyticsDataSource + ApiEventFilterAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+    query_builder.add_select_column(dimension).switch()?;
+    time_range
+        .set_filter_clause(&mut query_builder)
+        .attach_printable("Error filtering time range")
+        .switch()?;
+
+    query_builder
+        .add_filter_clause("merchant_id", merchant_id)
+        .switch()?;
+
+    query_builder.set_distinct();
+
+    query_builder
+        .execute_query::<ApiEventFilter, _>(pool)
+        .await
+        .change_context(FiltersError::QueryBuildingError)?
+        .change_context(FiltersError::QueryExecutionFailure)
+}
+
+#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
+pub struct ApiEventFilter {
+    pub status_code: Option<i32>,
+    pub flow_type: Option<String>,
+    pub api_flow: Option<String>,
+}
diff --git a/crates/analytics/src/api_event/metrics.rs b/crates/analytics/src/api_event/metrics.rs
new file mode 100644
index 000000000000..16f2d7a2f5ab
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics.rs
@@ -0,0 +1,110 @@
+use api_models::analytics::{
+    api_event::{
+        ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
+    },
+    Granularity, TimeRange,
+};
+use time::PrimitiveDateTime;
+
+use crate::{
+    query::{Aggregate, GroupByClause, ToSql, Window},
+    types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
+};
+
+mod api_count;
+pub mod latency;
+mod status_code_count;
+use api_count::ApiCount;
+use latency::MaxLatency;
+use status_code_count::StatusCodeCount;
+
+use self::latency::LatencyAvg;
+
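+// A single metric row: each metric query fills in only the field it computes
+// (`api_count`, `status_code_count`, or `latency`), while `start_bucket` and
+// `end_bucket` carry the time bounds of the aggregated bucket.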
+#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+pub struct ApiEventMetricRow {
+    pub latency: Option<u64>,
+    pub api_count: Option<u64>,
+    pub status_code_count: Option<u64>,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub start_bucket: Option<PrimitiveDateTime>,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub end_bucket: Option<PrimitiveDateTime>,
+}
+
+pub trait ApiEventMetricAnalytics: LoadRow<ApiEventMetricRow> + LoadRow<LatencyAvg> {}
+
+#[async_trait::async_trait]
+pub trait ApiEventMetric<T>
+where
+    T: AnalyticsDataSource + ApiEventMetricAnalytics,
+{
+    async fn load_metrics(
+        &self,
+        dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>>;
+}
+
+#[async_trait::async_trait]
+impl<T> ApiEventMetric<T> for ApiEventMetrics
+where
+    T: AnalyticsDataSource + ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        match self {
+            Self::Latency => {
+                MaxLatency
+                    .load_metrics(
+                        dimensions,
+                        merchant_id,
+                        filters,
+                        granularity,
+                        time_range,
+                        pool,
+                    )
+                    .await
+            }
+            Self::ApiCount => {
+                ApiCount
+                    .load_metrics(
+                        dimensions,
+                        merchant_id,
+                        filters,
+                        granularity,
+                        time_range,
+                        pool,
+                    )
+                    .await
+            }
+            Self::StatusCodeCount => {
+                StatusCodeCount
+                    .load_metrics(
+                        dimensions,
+                        merchant_id,
+                        filters,
+                        granularity,
+                        time_range,
+                        pool,
+                    )
+                    .await
+            }
+        }
+    }
+}
diff --git a/crates/analytics/src/api_event/metrics/api_count.rs b/crates/analytics/src/api_event/metrics/api_count.rs
new file mode 100644
index 000000000000..7f5f291aa53e
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/api_count.rs
@@ -0,0 +1,106 @@
+use api_models::analytics::{
+    api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+    Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
+    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct ApiCount;
+
+#[async_trait::async_trait]
+impl<T> super::ApiEventMetric<T> for ApiCount
+where
+    T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        _dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: None,
+                alias: Some("api_count"),
+            })
+            .switch()?;
+        if !filters.flow_type.is_empty() {
+            query_builder
+                .add_filter_in_range_clause(ApiEventDimensions::FlowType, &filters.flow_type)
+                .attach_printable("Error adding flow_type filter")
+                .switch()?;
+        }
+        query_builder
+            .add_select_column(Aggregate::Min {
+                field: "created_at",
+                alias: Some("start_bucket"),
+            })
+            .switch()?;
+        query_builder
+            .add_select_column(Aggregate::Max {
+                field: "created_at",
+                alias: Some("end_bucket"),
+            })
+            .switch()?;
+        if let Some(granularity) = granularity.as_ref() {
+            granularity
+                .set_group_by_clause(&mut query_builder)
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        query_builder
+            .add_filter_clause("merchant_id", merchant_id)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
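+        // When a granularity is set, the min/max `created_at` values fetched
+        // above are clipped to that granularity's bucket boundaries below;
+        // without one, the caller's requested time range is echoed back.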
+        query_builder
+            .execute_query::<ApiEventMetricRow, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    ApiEventMetricsBucketIdentifier::new(TimeRange {
+                        start_time: match (granularity, i.start_bucket) {
+                            (Some(g), Some(st)) => g.clip_to_start(st)?,
+                            _ => time_range.start_time,
+                        },
+                        end_time: granularity.as_ref().map_or_else(
+                            || Ok(time_range.end_time),
+                            |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+                        )?,
+                    }),
+                    i,
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
diff --git a/crates/analytics/src/api_event/metrics/latency.rs b/crates/analytics/src/api_event/metrics/latency.rs
new file mode 100644
index 000000000000..379b39fbeb9e
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/latency.rs
@@ -0,0 +1,138 @@
+use api_models::analytics::{
+    api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+    Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+    query::{
+        Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
+        Window,
+    },
+    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct MaxLatency;
+
+#[async_trait::async_trait]
+impl<T> super::ApiEventMetric<T> for MaxLatency
+where
+    T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        _dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> =
+            QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+        query_builder
+            .add_select_column(Aggregate::Sum {
+                field: "latency",
+                alias: Some("latency_sum"),
+            })
+            .switch()?;
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: Some("latency"),
+                alias: Some("latency_count"),
+            })
+            .switch()?;
+
+        query_builder
+            .add_select_column(Aggregate::Min {
+                field: "created_at",
+                alias: Some("start_bucket"),
+            })
+            .switch()?;
+        query_builder
+            .add_select_column(Aggregate::Max {
+                field: "created_at",
+                alias: Some("end_bucket"),
+            })
+            .switch()?;
+        if let Some(granularity) = granularity.as_ref() {
+            granularity
+                .set_group_by_clause(&mut query_builder)
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        filters.set_filter_clause(&mut query_builder).switch()?;
+
+        query_builder
+            .add_filter_clause("merchant_id", merchant_id)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
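+        // Calls that only hit the locker would skew the latency average, so
+        // events whose request payload contains the locker IP are excluded
+        // with a NOT LIKE clause before aggregation.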
+        query_builder
+            .add_custom_filter_clause("request", "10.63.134.6", FilterTypes::NotLike)
+            .attach_printable("Error filtering out locker IP")
+            .switch()?;
+
+        query_builder
+            .execute_query::<LatencyAvg, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    ApiEventMetricsBucketIdentifier::new(TimeRange {
+                        start_time: match (granularity, i.start_bucket) {
+                            (Some(g), Some(st)) => g.clip_to_start(st)?,
+                            _ => time_range.start_time,
+                        },
+                        end_time: granularity.as_ref().map_or_else(
+                            || Ok(time_range.end_time),
+                            |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+                        )?,
+                    }),
+                    ApiEventMetricRow {
+                        latency: if i.latency_count != 0 {
+                            Some(i.latency_sum.unwrap_or(0) / i.latency_count)
+                        } else {
+                            None
+                        },
+                        api_count: None,
+                        status_code_count: None,
+                        start_bucket: i.start_bucket,
+                        end_bucket: i.end_bucket,
+                    },
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+pub struct LatencyAvg {
+    latency_sum: Option<u64>,
+    latency_count: u64,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub start_bucket: Option<PrimitiveDateTime>,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub end_bucket: Option<PrimitiveDateTime>,
+}
diff --git a/crates/analytics/src/api_event/metrics/status_code_count.rs b/crates/analytics/src/api_event/metrics/status_code_count.rs
new file mode 100644
index 000000000000..5c652fd8e0c9
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/status_code_count.rs
@@ -0,0 +1,103 @@
+use api_models::analytics::{
+    api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+    Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
+    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct StatusCodeCount;
+
+#[async_trait::async_trait]
+impl<T> super::ApiEventMetric<T> for StatusCodeCount
+where
+    T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        _dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: Some("status_code"),
+                alias: Some("status_code_count"),
+            })
+            .switch()?;
+
+        filters.set_filter_clause(&mut query_builder).switch()?;
+
+        query_builder
+            .add_filter_clause("merchant_id", merchant_id)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
+        query_builder
+            .add_select_column(Aggregate::Min {
+                field: "created_at",
+                alias: Some("start_bucket"),
+            })
+            .switch()?;
+        query_builder
+            .add_select_column(Aggregate::Max {
+                field: "created_at",
+                alias: Some("end_bucket"),
+            })
+            .switch()?;
+        if let Some(granularity) = granularity.as_ref() {
+            granularity
+                .set_group_by_clause(&mut query_builder)
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        query_builder
+            .execute_query::<ApiEventMetricRow, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    ApiEventMetricsBucketIdentifier::new(TimeRange {
+                        start_time: match (granularity, i.start_bucket) {
+                            (Some(g), Some(st)) => g.clip_to_start(st)?,
+                            _ => time_range.start_time,
+                        },
+                        end_time: granularity.as_ref().map_or_else(
+                            || Ok(time_range.end_time),
+                            |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+                        )?,
+                    }),
+                    i,
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
diff --git a/crates/analytics/src/api_event/types.rs b/crates/analytics/src/api_event/types.rs
new file mode 100644
index 000000000000..72205fc72abf
--- /dev/null
+++ b/crates/analytics/src/api_event/types.rs
@@ -0,0 +1,33 @@
+use api_models::analytics::api_event::{ApiEventDimensions, ApiEventFilters};
+use error_stack::ResultExt;
+
+use crate::{
+    query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
+    types::{AnalyticsCollection, AnalyticsDataSource},
+};
+
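+// Empty filter lists are skipped so that an unset filter never contributes
+// an empty `IN ()` clause to the generated query.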
refunds::{filters::RefundFilterRow, metrics::RefundMetricRow}, + sdk_events::{filters::SdkEventFilter, metrics::SdkEventMetricRow}, + types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, QueryExecutionError}, +}; +use crate::{ + api_event::{ + events::ApiLogsResult, + filters::ApiEventFilter, + metrics::{latency::LatencyAvg, ApiEventMetricRow}, + }, + sdk_events::events::SdkEventsResult, + types::TableEngine, +}; + +pub type ClickhouseResult<T> = error_stack::Result<T, ClickhouseError>; + +#[derive(Clone, Debug)] +pub struct ClickhouseClient { + pub config: Arc<ClickhouseConfig>, +} + +#[derive(Clone, Debug, serde::Deserialize)] +pub struct ClickhouseConfig { + username: String, + password: Option<String>, + host: String, + database_name: String, +} + +impl Default for ClickhouseConfig { + fn default() -> Self { + Self { + username: "default".to_string(), + password: None, + host: "http://localhost:8123".to_string(), + database_name: "default".to_string(), + } + } +} + +impl ClickhouseClient { + async fn execute_query(&self, query: &str) -> ClickhouseResult<Vec<serde_json::Value>> { + logger::debug!("Executing query: {query}"); + let client = reqwest::Client::new(); + let params = CkhQuery { + date_time_output_format: String::from("iso"), + output_format_json_quote_64bit_integers: 0, + database: self.config.database_name.clone(), + }; + let response = client + .post(&self.config.host) + .query(&params) + .basic_auth(self.config.username.clone(), self.config.password.clone()) + .body(format!("{query}\nFORMAT JSON")) + .send() + .await + .into_report() + .change_context(ClickhouseError::ConnectionError)?; + + logger::debug!(clickhouse_response=?response, query=?query, "Clickhouse response"); + if response.status() != StatusCode::OK { + response.text().await.map_or_else( + |er| { + Err(ClickhouseError::ResponseError) + .into_report() + .attach_printable_lazy(|| format!("Error: {er:?}")) + }, + |t| Err(ClickhouseError::ResponseNotOK(t)).into_report(), + ) + } else { + Ok(response + .json::<CkhOutput<serde_json::Value>>() + .await + .into_report() + .change_context(ClickhouseError::ResponseError)? + .data) + } + } +} + +#[async_trait::async_trait] +impl AnalyticsDataSource for ClickhouseClient { + type Row = serde_json::Value; + + async fn load_results<T>( + &self, + query: &str, + ) -> common_utils::errors::CustomResult<Vec<T>, QueryExecutionError> + where + Self: LoadRow<T>, + { + self.execute_query(query) + .await + .change_context(QueryExecutionError::DatabaseError)?
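`execute_query` above drives ClickHouse over its plain HTTP interface rather than a native driver: the SQL is POSTed with `FORMAT JSON` appended, credentials go via basic auth, and rows come back under a `data` key. A stripped-down version of the same round trip, minus the error-stack plumbing and the `CkhQuery` settings (assumes `reqwest` with its `json` feature, `serde`, `serde_json`, and `tokio`):

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct CkhOutput<T> {
    data: Vec<T>,
}

/// POST a query to ClickHouse's HTTP interface and pull rows out of the
/// JSON envelope, as `execute_query` above does.
async fn run_query(host: &str, query: &str) -> Result<Vec<serde_json::Value>, reqwest::Error> {
    // ClickHouse reads the output format from the query text itself.
    let body = format!("{query}\nFORMAT JSON");
    let out: CkhOutput<serde_json::Value> = reqwest::Client::new()
        .post(host)
        .basic_auth("default", None::<&str>)
        .body(body)
        .send()
        .await?
        .json()
        .await?;
    Ok(out.data)
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let rows = run_query("http://localhost:8123", "SELECT 1 AS x").await?;
    println!("{rows:?}"); // e.g. [Object {"x": Number(1)}]
    Ok(())
}
```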
+ .into_iter() + .map(Self::load_row) + .collect::, _>>() + .change_context(QueryExecutionError::RowExtractionFailure) + } + + fn get_table_engine(table: AnalyticsCollection) -> TableEngine { + match table { + AnalyticsCollection::Payment + | AnalyticsCollection::Refund + | AnalyticsCollection::PaymentIntent => { + TableEngine::CollapsingMergeTree { sign: "sign_flag" } + } + AnalyticsCollection::SdkEvents => TableEngine::BasicTree, + AnalyticsCollection::ApiEvents => TableEngine::BasicTree, + } + } +} + +impl LoadRow for ClickhouseClient +where + Self::Row: TryInto>, +{ + fn load_row(row: Self::Row) -> common_utils::errors::CustomResult { + row.try_into() + .change_context(QueryExecutionError::RowExtractionFailure) + } +} + +impl super::payments::filters::PaymentFilterAnalytics for ClickhouseClient {} +impl super::payments::metrics::PaymentMetricAnalytics for ClickhouseClient {} +impl super::payments::distribution::PaymentDistributionAnalytics for ClickhouseClient {} +impl super::refunds::metrics::RefundMetricAnalytics for ClickhouseClient {} +impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {} +impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {} +impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {} +impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {} +impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {} +impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {} +impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {} + +#[derive(Debug, serde::Serialize)] +struct CkhQuery { + date_time_output_format: String, + output_format_json_quote_64bit_integers: u8, + database: String, +} + +#[derive(Debug, serde::Deserialize)] +struct CkhOutput { + data: Vec, +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse ApiLogsResult in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse SdkEventsResult in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse PaymentMetricRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse PaymentDistributionRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse FilterRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse RefundMetricRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) 
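Each row struct gets its own `TryInto` impl so `load_row` can stay generic while errors carry a struct-specific message; every impl bottoms out in a single `serde_json::from_value` call. The pattern in miniature (error type simplified to `serde_json::Error` in place of the crate's `Report<ParsingError>`):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ApiEventRow {
    status_code: u16,
    flow_type: String,
}

/// Generic version of the per-row conversions: one serde hop from the
/// untyped ClickHouse row to a typed struct.
fn load_row<T: serde::de::DeserializeOwned>(row: serde_json::Value) -> Result<T, serde_json::Error> {
    serde_json::from_value(row)
}

fn main() -> Result<(), serde_json::Error> {
    let raw = serde_json::json!({ "status_code": 200, "flow_type": "payments" });
    let typed: ApiEventRow = load_row(raw)?;
    println!("{typed:?}");
    Ok(())
}
```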
-> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse RefundFilterRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse ApiEventMetricRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse LatencyAvg in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse SdkEventMetricRow in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse SdkEventFilter in clickhouse results", + )) + } +} + +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self) + .into_report() + .change_context(ParsingError::StructParseFailure( + "Failed to parse ApiEventFilter in clickhouse results", + )) + } +} + +impl ToSql for PrimitiveDateTime { + fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result { + let format = + time::format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]") + .into_report() + .change_context(ParsingError::DateTimeParsingError) + .attach_printable("Failed to parse format description")?; + self.format(&format) + .into_report() + .change_context(ParsingError::EncodeError( + "failed to encode to clickhouse date-time format", + )) + .attach_printable("Failed to format date time") + } +} + +impl ToSql for AnalyticsCollection { + fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result { + match self { + Self::Payment => Ok("payment_attempt_dist".to_string()), + Self::Refund => Ok("refund_dist".to_string()), + Self::SdkEvents => Ok("sdk_events_dist".to_string()), + Self::ApiEvents => Ok("api_audit_log".to_string()), + Self::PaymentIntent => Ok("payment_intents_dist".to_string()), + } + } +} + +impl ToSql for Aggregate +where + T: ToSql, +{ + fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result { + Ok(match self { + Self::Count { field: _, alias } => { + let query = match table_engine { + TableEngine::CollapsingMergeTree { sign } => format!("sum({sign})"), + TableEngine::BasicTree => "count(*)".to_string(), + }; + format!( + "{query}{}", + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + Self::Sum { field, alias } => { + let query = match table_engine { + TableEngine::CollapsingMergeTree { sign } => format!( + "sum({sign} * {})", + field + .to_sql(table_engine) + .attach_printable("Failed to sum aggregate")? + ), + TableEngine::BasicTree => format!( + "sum({})", + field + .to_sql(table_engine) + .attach_printable("Failed to sum aggregate")? 
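`ToSql for PrimitiveDateTime` formats through an explicit `[year]-[month]-[day] [hour]:[minute]:[second]` description because ClickHouse's `DateTime` text form is space-separated, not RFC 3339. The same conversion in isolation (assumes the `time` crate with its `macros` and `formatting` features):

```rust
use time::{format_description, macros::datetime, PrimitiveDateTime};

/// Render a timestamp the way ClickHouse's `DateTime` expects it:
/// space-separated, second precision, no timezone.
fn to_clickhouse_datetime(dt: PrimitiveDateTime) -> Result<String, Box<dyn std::error::Error>> {
    let format = format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]")?;
    Ok(dt.format(&format)?)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    assert_eq!(
        to_clickhouse_datetime(datetime!(2023-11-29 10:30:00))?,
        "2023-11-29 10:30:00"
    );
    Ok(())
}
```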
+ ), + }; + format!( + "{query}{}", + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + Self::Min { field, alias } => { + format!( + "min({}){}", + field + .to_sql(table_engine) + .attach_printable("Failed to min aggregate")?, + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + Self::Max { field, alias } => { + format!( + "max({}){}", + field + .to_sql(table_engine) + .attach_printable("Failed to max aggregate")?, + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + }) + } +} + +impl ToSql for Window +where + T: ToSql, +{ + fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result { + Ok(match self { + Self::Sum { + field, + partition_by, + order_by, + alias, + } => { + format!( + "sum({}) over ({}{}){}", + field + .to_sql(table_engine) + .attach_printable("Failed to sum window")?, + partition_by.as_ref().map_or_else( + || "".to_owned(), + |partition_by| format!("partition by {}", partition_by.to_owned()) + ), + order_by.as_ref().map_or_else( + || "".to_owned(), + |(order_column, order)| format!( + " order by {} {}", + order_column.to_owned(), + order.to_string() + ) + ), + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + Self::RowNumber { + field: _, + partition_by, + order_by, + alias, + } => { + format!( + "row_number() over ({}{}){}", + partition_by.as_ref().map_or_else( + || "".to_owned(), + |partition_by| format!("partition by {}", partition_by.to_owned()) + ), + order_by.as_ref().map_or_else( + || "".to_owned(), + |(order_column, order)| format!( + " order by {} {}", + order_column.to_owned(), + order.to_string() + ) + ), + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + }) + } +} + +#[derive(Debug, thiserror::Error)] +pub enum ClickhouseError { + #[error("Clickhouse connection error")] + ConnectionError, + #[error("Clickhouse NON-200 response content: '{0}'")] + ResponseNotOK(String), + #[error("Clickhouse response error")] + ResponseError, +} diff --git a/crates/analytics/src/core.rs b/crates/analytics/src/core.rs new file mode 100644 index 000000000000..354e1e2f1766 --- /dev/null +++ b/crates/analytics/src/core.rs @@ -0,0 +1,31 @@ +use api_models::analytics::GetInfoResponse; + +use crate::{types::AnalyticsDomain, utils}; + +pub async fn get_domain_info( + domain: AnalyticsDomain, +) -> crate::errors::AnalyticsResult { + let info = match domain { + AnalyticsDomain::Payments => GetInfoResponse { + metrics: utils::get_payment_metrics_info(), + download_dimensions: None, + dimensions: utils::get_payment_dimensions(), + }, + AnalyticsDomain::Refunds => GetInfoResponse { + metrics: utils::get_refund_metrics_info(), + download_dimensions: None, + dimensions: utils::get_refund_dimensions(), + }, + AnalyticsDomain::SdkEvents => GetInfoResponse { + metrics: utils::get_sdk_event_metrics_info(), + download_dimensions: None, + dimensions: utils::get_sdk_event_dimensions(), + }, + AnalyticsDomain::ApiEvents => GetInfoResponse { + metrics: utils::get_api_event_metrics_info(), + download_dimensions: None, + dimensions: utils::get_api_event_dimensions(), + }, + }; + Ok(info) +} diff --git a/crates/router/src/analytics/errors.rs b/crates/analytics/src/errors.rs similarity index 100% rename from crates/router/src/analytics/errors.rs rename to crates/analytics/src/errors.rs diff --git a/crates/analytics/src/lambda_utils.rs b/crates/analytics/src/lambda_utils.rs new file mode 100644 index 000000000000..f9446a402b4e --- /dev/null +++ 
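The `Aggregate::Count` arm is the one engine-specific subtlety in the SQL generation: on a `CollapsingMergeTree`, a logical delete sits in the table as a `sign = -1` duplicate until parts merge, so a plain `count(*)` over-counts and `sum(sign_flag)` is emitted instead. That branch, reduced to a sketch:

```rust
enum TableEngine {
    CollapsingMergeTree { sign: &'static str },
    BasicTree,
}

/// Render a COUNT aggregate that stays correct on collapsing tables:
/// every live row contributes +1 and every pending delete contributes -1.
fn count_expr(engine: &TableEngine, alias: &str) -> String {
    let body = match engine {
        TableEngine::CollapsingMergeTree { sign } => format!("sum({sign})"),
        TableEngine::BasicTree => "count(*)".to_string(),
    };
    format!("{body} as {alias}")
}

fn main() {
    let collapsing = TableEngine::CollapsingMergeTree { sign: "sign_flag" };
    assert_eq!(count_expr(&collapsing, "count"), "sum(sign_flag) as count");
    assert_eq!(count_expr(&TableEngine::BasicTree, "count"), "count(*) as count");
}
```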
b/crates/analytics/src/lambda_utils.rs @@ -0,0 +1,36 @@ +use aws_config::{self, meta::region::RegionProviderChain}; +use aws_sdk_lambda::{config::Region, types::InvocationType::Event, Client}; +use aws_smithy_types::Blob; +use common_utils::errors::CustomResult; +use error_stack::{IntoReport, ResultExt}; + +use crate::errors::AnalyticsError; + +async fn get_aws_client(region: String) -> Client { + let region_provider = RegionProviderChain::first_try(Region::new(region)); + let sdk_config = aws_config::from_env().region(region_provider).load().await; + Client::new(&sdk_config) +} + +pub async fn invoke_lambda( + function_name: &str, + region: &str, + json_bytes: &[u8], +) -> CustomResult<(), AnalyticsError> { + get_aws_client(region.to_string()) + .await + .invoke() + .function_name(function_name) + .invocation_type(Event) + .payload(Blob::new(json_bytes.to_owned())) + .send() + .await + .into_report() + .map_err(|er| { + let er_rep = format!("{er:?}"); + er.attach_printable(er_rep) + }) + .change_context(AnalyticsError::UnknownError) + .attach_printable("Lambda invocation failed")?; + Ok(()) +} diff --git a/crates/analytics/src/lib.rs b/crates/analytics/src/lib.rs new file mode 100644 index 000000000000..24da77f84f2b --- /dev/null +++ b/crates/analytics/src/lib.rs @@ -0,0 +1,509 @@ +mod clickhouse; +pub mod core; +pub mod errors; +pub mod metrics; +pub mod payments; +mod query; +pub mod refunds; + +pub mod api_event; +pub mod sdk_events; +mod sqlx; +mod types; +use api_event::metrics::{ApiEventMetric, ApiEventMetricRow}; +pub use types::AnalyticsDomain; +pub mod lambda_utils; +pub mod utils; + +use std::sync::Arc; + +use api_models::analytics::{ + api_event::{ + ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier, + }, + payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, + refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier}, + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier, + }, + Distribution, Granularity, TimeRange, +}; +use clickhouse::ClickhouseClient; +pub use clickhouse::ClickhouseConfig; +use error_stack::IntoReport; +use router_env::{ + logger, + tracing::{self, instrument}, +}; +use storage_impl::config::Database; + +use self::{ + payments::{ + distribution::{PaymentDistribution, PaymentDistributionRow}, + metrics::{PaymentMetric, PaymentMetricRow}, + }, + refunds::metrics::{RefundMetric, RefundMetricRow}, + sdk_events::metrics::{SdkEventMetric, SdkEventMetricRow}, + sqlx::SqlxClient, + types::MetricsError, +}; + +#[derive(Clone, Debug)] +pub enum AnalyticsProvider { + Sqlx(SqlxClient), + Clickhouse(ClickhouseClient), + CombinedCkh(SqlxClient, ClickhouseClient), + CombinedSqlx(SqlxClient, ClickhouseClient), +} + +impl Default for AnalyticsProvider { + fn default() -> Self { + Self::Sqlx(SqlxClient::default()) + } +} + +impl ToString for AnalyticsProvider { + fn to_string(&self) -> String { + String::from(match self { + Self::Clickhouse(_) => "Clickhouse", + Self::Sqlx(_) => "Sqlx", + Self::CombinedCkh(_, _) => "CombinedCkh", + Self::CombinedSqlx(_, _) => "CombinedSqlx", + }) + } +} + +impl AnalyticsProvider { + #[instrument(skip_all)] + pub async fn get_payment_metrics( + &self, + metric: &PaymentMetrics, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + // Metrics to get the fetch time for each 
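`invoke_lambda` uses `InvocationType::Event`, so the call returns as soon as AWS has queued the payload; there is no response body to parse, only delivery errors to surface. A hedged usage sketch (function name, region, and payload shape are illustrative, not fixed by this crate):

```rust
// Illustrative caller for the `invoke_lambda` helper defined above; the
// function name, region, and payload shape are made-up examples.
use analytics::lambda_utils::invoke_lambda; // module path as introduced in this diff

async fn request_payment_report() {
    let payload = serde_json::json!({ "merchant_id": "merchant_123" }); // hypothetical shape
    let bytes = serde_json::to_vec(&payload).expect("static JSON always serializes");
    // Event invocation: returns once AWS has queued the payload; the report
    // generation itself is not awaited here.
    if let Err(report) = invoke_lambda("generate-payment-report", "us-east-1", &bytes).await {
        eprintln!("report lambda invocation failed: {report:?}");
    }
}
```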
payment metric + metrics::request::record_operation_time( + async { + match self { + Self::Sqlx(pool) => { + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::Clickhouse(pool) => { + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::CombinedCkh(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!(metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + ckh_pool, + ), + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + sqlx_pool, + )); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics") + }, + _ => {} + + }; + + ckh_result + } + Self::CombinedSqlx(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!(metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + ckh_pool, + ), + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + sqlx_pool, + )); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics") + }, + _ => {} + + }; + + sqlx_result + } + } + }, + &metrics::METRIC_FETCH_TIME, + metric, + self, + ) + .await + } + + pub async fn get_payment_distribution( + &self, + distribution: &Distribution, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + // Metrics to get the fetch time for each payment metric + metrics::request::record_operation_time( + async { + match self { + Self::Sqlx(pool) => { + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::Clickhouse(pool) => { + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::CombinedCkh(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + ckh_pool, + ), + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + sqlx_pool, + )); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution") + }, + _ => {} + + }; + + ckh_result + } + Self::CombinedSqlx(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + ckh_pool, + ), + distribution.distribution_for + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + sqlx_pool, + 
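The `CombinedCkh`/`CombinedSqlx` arms above implement a shadow read: both stores are queried concurrently, a mismatch is logged, and only the variant decides whose answer is served. The pattern extracted into a minimal generic helper (logging simplified to `eprintln!`):

```rust
use std::fmt::Debug;

/// Shadow read: run the primary and shadow queries concurrently, log any
/// divergence, and serve the primary's answer either way.
async fn compare_and_pick<T, E, F1, F2>(primary: F1, shadow: F2) -> Result<T, E>
where
    T: PartialEq + Debug,
    F1: std::future::Future<Output = Result<T, E>>,
    F2: std::future::Future<Output = Result<T, E>>,
{
    let (primary_res, shadow_res) = tokio::join!(primary, shadow);
    if let (Ok(p), Ok(s)) = (&primary_res, &shadow_res) {
        if p != s {
            eprintln!("analytics mismatch: primary={p:?} shadow={s:?}");
        }
    }
    primary_res
}

#[tokio::main]
async fn main() {
    // CombinedCkh serves ClickHouse and shadows Postgres; CombinedSqlx is the mirror image.
    let served = compare_and_pick(async { Ok::<_, ()>(41) }, async { Ok(42) }).await;
    assert_eq!(served, Ok(41)); // the mismatch was only logged, the primary still wins
}
```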
)); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution") + }, + _ => {} + + }; + + sqlx_result + } + } + }, + &metrics::METRIC_FETCH_TIME, + &distribution.distribution_for, + self, + ) + .await + } + + pub async fn get_refund_metrics( + &self, + metric: &RefundMetrics, + dimensions: &[RefundDimensions], + merchant_id: &str, + filters: &RefundFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + // Metrics to get the fetch time for each refund metric + metrics::request::record_operation_time( + async { + match self { + Self::Sqlx(pool) => { + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::Clickhouse(pool) => { + metric + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::CombinedCkh(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!( + metric.load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + ckh_pool, + ), + metric.load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + sqlx_pool, + ) + ); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics") + } + _ => {} + }; + ckh_result + } + Self::CombinedSqlx(sqlx_pool, ckh_pool) => { + let (ckh_result, sqlx_result) = tokio::join!( + metric.load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + ckh_pool, + ), + metric.load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + sqlx_pool, + ) + ); + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics") + } + _ => {} + }; + sqlx_result + } + } + }, + &metrics::METRIC_FETCH_TIME, + metric, + self, + ) + .await + } + + pub async fn get_sdk_event_metrics( + &self, + metric: &SdkEventMetrics, + dimensions: &[SdkEventDimensions], + pub_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + match self { + Self::Sqlx(_pool) => Err(MetricsError::NotImplemented).into_report(), + Self::Clickhouse(pool) => { + metric + .load_metrics(dimensions, pub_key, filters, granularity, time_range, pool) + .await + } + Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => { + metric + .load_metrics( + dimensions, + pub_key, + filters, + granularity, + // Since SDK events are ckh only use ckh here + time_range, + ckh_pool, + ) + .await + } + } + } + + pub async fn get_api_event_metrics( + &self, + metric: &ApiEventMetrics, + dimensions: &[ApiEventDimensions], + pub_key: &str, + filters: &ApiEventFilters, + granularity: &Option, + time_range: &TimeRange, + ) -> types::MetricsResult> { + match self { + Self::Sqlx(_pool) => Err(MetricsError::NotImplemented).into_report(), + Self::Clickhouse(ckh_pool) + | Self::CombinedCkh(_, ckh_pool) + | Self::CombinedSqlx(_, ckh_pool) => { + // Since API events are ckh only use ckh here + metric + 
.load_metrics( + dimensions, + pub_key, + filters, + granularity, + time_range, + ckh_pool, + ) + .await + } + } + } + + pub async fn from_conf(config: &AnalyticsConfig) -> Self { + match config { + AnalyticsConfig::Sqlx { sqlx } => Self::Sqlx(SqlxClient::from_conf(sqlx).await), + AnalyticsConfig::Clickhouse { clickhouse } => Self::Clickhouse(ClickhouseClient { + config: Arc::new(clickhouse.clone()), + }), + AnalyticsConfig::CombinedCkh { sqlx, clickhouse } => Self::CombinedCkh( + SqlxClient::from_conf(sqlx).await, + ClickhouseClient { + config: Arc::new(clickhouse.clone()), + }, + ), + AnalyticsConfig::CombinedSqlx { sqlx, clickhouse } => Self::CombinedSqlx( + SqlxClient::from_conf(sqlx).await, + ClickhouseClient { + config: Arc::new(clickhouse.clone()), + }, + ), + } + } +} + +#[derive(Clone, Debug, serde::Deserialize)] +#[serde(tag = "source")] +#[serde(rename_all = "lowercase")] +pub enum AnalyticsConfig { + Sqlx { + sqlx: Database, + }, + Clickhouse { + clickhouse: ClickhouseConfig, + }, + CombinedCkh { + sqlx: Database, + clickhouse: ClickhouseConfig, + }, + CombinedSqlx { + sqlx: Database, + clickhouse: ClickhouseConfig, + }, +} + +impl Default for AnalyticsConfig { + fn default() -> Self { + Self::Sqlx { + sqlx: Database::default(), + } + } +} + +#[derive(Clone, Debug, serde::Deserialize, Default, serde::Serialize)] +pub struct ReportConfig { + pub payment_function: String, + pub refund_function: String, + pub dispute_function: String, + pub region: String, +} diff --git a/crates/analytics/src/main.rs b/crates/analytics/src/main.rs new file mode 100644 index 000000000000..5bf256ea9783 --- /dev/null +++ b/crates/analytics/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello world"); +} diff --git a/crates/router/src/analytics/metrics.rs b/crates/analytics/src/metrics.rs similarity index 100% rename from crates/router/src/analytics/metrics.rs rename to crates/analytics/src/metrics.rs diff --git a/crates/router/src/analytics/metrics/request.rs b/crates/analytics/src/metrics/request.rs similarity index 51% rename from crates/router/src/analytics/metrics/request.rs rename to crates/analytics/src/metrics/request.rs index b7c202f2db25..3d1a78808f34 100644 --- a/crates/router/src/analytics/metrics/request.rs +++ b/crates/analytics/src/metrics/request.rs @@ -6,24 +6,20 @@ pub fn add_attributes>( } #[inline] -pub async fn record_operation_time( +pub async fn record_operation_time( future: F, metric: &once_cell::sync::Lazy>, - metric_name: &api_models::analytics::payments::PaymentMetrics, - source: &crate::analytics::AnalyticsProvider, + metric_name: &T, + source: &crate::AnalyticsProvider, ) -> R where F: futures::Future, + T: ToString, { let (result, time) = time_future(future).await; let attributes = &[ add_attributes("metric_name", metric_name.to_string()), - add_attributes( - "source", - match source { - crate::analytics::AnalyticsProvider::Sqlx(_) => "Sqlx", - }, - ), + add_attributes("source", source.to_string()), ]; let value = time.as_secs_f64(); metric.record(&super::CONTEXT, value, attributes); @@ -44,17 +40,3 @@ where let time_spent = start.elapsed(); (result, time_spent) } - -#[macro_export] -macro_rules! 
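`AnalyticsConfig` is an internally tagged enum: the `source` field selects the variant and sibling tables carry the connection details, which is what lets deployments flip between providers by configuration alone. A small deserialization sketch with mirrored, trimmed-down types (inline JSON stands in for the real TOML-sourced config):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ClickhouseConfig {
    username: String,
    host: String,
    database_name: String,
}

// Same serde shape as the crate's `AnalyticsConfig`, trimmed to two variants.
#[derive(Debug, Deserialize)]
#[serde(tag = "source", rename_all = "lowercase")]
enum AnalyticsConfig {
    Clickhouse { clickhouse: ClickhouseConfig },
    CombinedCkh { clickhouse: ClickhouseConfig }, // sqlx side omitted for brevity
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "source": "clickhouse",
        "clickhouse": {
            "username": "default",
            "host": "http://localhost:8123",
            "database_name": "default"
        }
    }"#;
    let cfg: AnalyticsConfig = serde_json::from_str(raw)?;
    println!("{cfg:?}");
    Ok(())
}
```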
histogram_metric { - ($name:ident, $meter:ident) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.u64_histogram(stringify!($name)).init()); - }; - ($name:ident, $meter:ident, $description:literal) => { - pub(crate) static $name: once_cell::sync::Lazy< - $crate::opentelemetry::metrics::Histogram, - > = once_cell::sync::Lazy::new(|| $meter.u64_histogram($description).init()); - }; -} diff --git a/crates/analytics/src/payments.rs b/crates/analytics/src/payments.rs new file mode 100644 index 000000000000..984647172c5b --- /dev/null +++ b/crates/analytics/src/payments.rs @@ -0,0 +1,16 @@ +pub mod accumulator; +mod core; +pub mod distribution; +pub mod filters; +pub mod metrics; +pub mod types; +pub use accumulator::{ + PaymentDistributionAccumulator, PaymentMetricAccumulator, PaymentMetricsAccumulator, +}; + +pub trait PaymentAnalytics: + metrics::PaymentMetricAnalytics + filters::PaymentFilterAnalytics +{ +} + +pub use self::core::{get_filters, get_metrics}; diff --git a/crates/router/src/analytics/payments/accumulator.rs b/crates/analytics/src/payments/accumulator.rs similarity index 62% rename from crates/router/src/analytics/payments/accumulator.rs rename to crates/analytics/src/payments/accumulator.rs index 5eebd0974693..c340f2888f8b 100644 --- a/crates/router/src/analytics/payments/accumulator.rs +++ b/crates/analytics/src/payments/accumulator.rs @@ -1,8 +1,9 @@ -use api_models::analytics::payments::PaymentMetricsBucketValue; -use common_enums::enums as storage_enums; +use api_models::analytics::payments::{ErrorResult, PaymentMetricsBucketValue}; +use bigdecimal::ToPrimitive; +use diesel_models::enums as storage_enums; use router_env::logger; -use super::metrics::PaymentMetricRow; +use super::{distribution::PaymentDistributionRow, metrics::PaymentMetricRow}; #[derive(Debug, Default)] pub struct PaymentMetricsAccumulator { @@ -11,6 +12,22 @@ pub struct PaymentMetricsAccumulator { pub payment_success: CountAccumulator, pub processed_amount: SumAccumulator, pub avg_ticket_size: AverageAccumulator, + pub payment_error_message: ErrorDistributionAccumulator, + pub retries_count: CountAccumulator, + pub retries_amount_processed: SumAccumulator, + pub connector_success_rate: SuccessRateAccumulator, +} + +#[derive(Debug, Default)] +pub struct ErrorDistributionRow { + pub count: i64, + pub total: i64, + pub error_message: String, +} + +#[derive(Debug, Default)] +pub struct ErrorDistributionAccumulator { + pub error_vec: Vec, } #[derive(Debug, Default)] @@ -45,6 +62,51 @@ pub trait PaymentMetricAccumulator { fn collect(self) -> Self::MetricOutput; } +pub trait PaymentDistributionAccumulator { + type DistributionOutput; + + fn add_distribution_bucket(&mut self, distribution: &PaymentDistributionRow); + + fn collect(self) -> Self::DistributionOutput; +} + +impl PaymentDistributionAccumulator for ErrorDistributionAccumulator { + type DistributionOutput = Option>; + + fn add_distribution_bucket(&mut self, distribution: &PaymentDistributionRow) { + self.error_vec.push(ErrorDistributionRow { + count: distribution.count.unwrap_or_default(), + total: distribution + .total + .clone() + .map(|i| i.to_i64().unwrap_or_default()) + .unwrap_or_default(), + error_message: distribution.error_message.clone().unwrap_or("".to_string()), + }) + } + + fn collect(mut self) -> Self::DistributionOutput { + if self.error_vec.is_empty() { + None + } else { + self.error_vec.sort_by(|a, b| b.count.cmp(&a.count)); + let mut res: Vec = 
Vec::new(); + for val in self.error_vec.into_iter() { + let perc = f64::from(u32::try_from(val.count).ok()?) * 100.0 + / f64::from(u32::try_from(val.total).ok()?); + + res.push(ErrorResult { + reason: val.error_message, + count: val.count, + percentage: (perc * 100.0).round() / 100.0, + }) + } + + Some(res) + } + } +} + impl PaymentMetricAccumulator for SuccessRateAccumulator { type MetricOutput = Option; @@ -145,6 +207,10 @@ impl PaymentMetricsAccumulator { payment_success_count: self.payment_success.collect(), payment_processed_amount: self.processed_amount.collect(), avg_ticket_size: self.avg_ticket_size.collect(), + payment_error_message: self.payment_error_message.collect(), + retries_count: self.retries_count.collect(), + retries_amount_processed: self.retries_amount_processed.collect(), + connector_success_rate: self.connector_success_rate.collect(), } } } diff --git a/crates/analytics/src/payments/core.rs b/crates/analytics/src/payments/core.rs new file mode 100644 index 000000000000..138e88789327 --- /dev/null +++ b/crates/analytics/src/payments/core.rs @@ -0,0 +1,303 @@ +#![allow(dead_code)] +use std::collections::HashMap; + +use api_models::analytics::{ + payments::{ + MetricsBucketResponse, PaymentDimensions, PaymentDistributions, PaymentMetrics, + PaymentMetricsBucketIdentifier, + }, + AnalyticsMetadata, FilterValue, GetPaymentFiltersRequest, GetPaymentMetricRequest, + MetricsResponse, PaymentFiltersResponse, +}; +use common_utils::errors::CustomResult; +use error_stack::{IntoReport, ResultExt}; +use router_env::{ + instrument, logger, + tracing::{self, Instrument}, +}; + +use super::{ + distribution::PaymentDistributionRow, + filters::{get_payment_filter_for_dimension, FilterRow}, + metrics::PaymentMetricRow, + PaymentMetricsAccumulator, +}; +use crate::{ + errors::{AnalyticsError, AnalyticsResult}, + metrics, + payments::{PaymentDistributionAccumulator, PaymentMetricAccumulator}, + AnalyticsProvider, +}; + +#[derive(Debug)] +pub enum TaskType { + MetricTask( + PaymentMetrics, + CustomResult, AnalyticsError>, + ), + DistributionTask( + PaymentDistributions, + CustomResult, AnalyticsError>, + ), +} + +#[instrument(skip_all)] +pub async fn get_metrics( + pool: &AnalyticsProvider, + merchant_id: &str, + req: GetPaymentMetricRequest, +) -> AnalyticsResult> { + let mut metrics_accumulator: HashMap< + PaymentMetricsBucketIdentifier, + PaymentMetricsAccumulator, + > = HashMap::new(); + + let mut set = tokio::task::JoinSet::new(); + for metric_type in req.metrics.iter().cloned() { + let req = req.clone(); + let pool = pool.clone(); + let task_span = tracing::debug_span!( + "analytics_payments_metrics_query", + payment_metric = metric_type.as_ref() + ); + + // TODO: lifetime issues with joinset, + // can be optimized away if joinset lifetime requirements are relaxed + let merchant_id_scoped = merchant_id.to_owned(); + set.spawn( + async move { + let data = pool + .get_payment_metrics( + &metric_type, + &req.group_by_names.clone(), + &merchant_id_scoped, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + TaskType::MetricTask(metric_type, data) + } + .instrument(task_span), + ); + } + + if let Some(distribution) = req.clone().distribution { + let req = req.clone(); + let pool = pool.clone(); + let task_span = tracing::debug_span!( + "analytics_payments_distribution_query", + payment_distribution = distribution.distribution_for.as_ref() + ); + + let merchant_id_scoped = merchant_id.to_owned(); + 
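`ErrorDistributionAccumulator::collect` above converts each error's `count`/`total` pair into a percentage rounded to two decimal places, dropping rows whose counts do not fit in `u32`. The arithmetic in isolation:

```rust
/// Percentage of `count` out of `total`, rounded to two decimals,
/// mirroring the arithmetic in `ErrorDistributionAccumulator::collect`.
fn error_percentage(count: i64, total: i64) -> Option<f64> {
    // The accumulator goes through u32 -> f64, so the conversion is lossless.
    let count = f64::from(u32::try_from(count).ok()?);
    let total = f64::from(u32::try_from(total).ok()?);
    let perc = count * 100.0 / total;
    Some((perc * 100.0).round() / 100.0)
}

fn main() {
    assert_eq!(error_percentage(1, 3), Some(33.33));
    assert_eq!(error_percentage(-1, 3), None); // negative counts are dropped
}
```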
set.spawn( + async move { + let data = pool + .get_payment_distribution( + &distribution, + &req.group_by_names.clone(), + &merchant_id_scoped, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + TaskType::DistributionTask(distribution.distribution_for, data) + } + .instrument(task_span), + ); + } + + while let Some(task_type) = set + .join_next() + .await + .transpose() + .into_report() + .change_context(AnalyticsError::UnknownError)? + { + match task_type { + TaskType::MetricTask(metric, data) => { + let data = data?; + let attributes = &[ + metrics::request::add_attributes("metric_type", metric.to_string()), + metrics::request::add_attributes("source", pool.to_string()), + ]; + + let value = u64::try_from(data.len()); + if let Ok(val) = value { + metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); + } + + for (id, value) in data { + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + PaymentMetrics::PaymentSuccessRate => metrics_builder + .payment_success_rate + .add_metrics_bucket(&value), + PaymentMetrics::PaymentCount => { + metrics_builder.payment_count.add_metrics_bucket(&value) + } + PaymentMetrics::PaymentSuccessCount => { + metrics_builder.payment_success.add_metrics_bucket(&value) + } + PaymentMetrics::PaymentProcessedAmount => { + metrics_builder.processed_amount.add_metrics_bucket(&value) + } + PaymentMetrics::AvgTicketSize => { + metrics_builder.avg_ticket_size.add_metrics_bucket(&value) + } + PaymentMetrics::RetriesCount => { + metrics_builder.retries_count.add_metrics_bucket(&value); + metrics_builder + .retries_amount_processed + .add_metrics_bucket(&value) + } + PaymentMetrics::ConnectorSuccessRate => { + metrics_builder + .connector_success_rate + .add_metrics_bucket(&value); + } + } + } + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + TaskType::DistributionTask(distribution, data) => { + let data = data?; + let attributes = &[ + metrics::request::add_attributes("distribution_type", distribution.to_string()), + metrics::request::add_attributes("source", pool.to_string()), + ]; + + let value = u64::try_from(data.len()); + if let Ok(val) = value { + metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); + } + + for (id, value) in data { + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for distribution {distribution}"); + let metrics_accumulator = metrics_accumulator.entry(id).or_default(); + match distribution { + PaymentDistributions::PaymentErrorMessage => metrics_accumulator + .payment_error_message + .add_distribution_bucket(&value), + } + } + + logger::debug!( + "Analytics Accumulated Results: distribution: {}, results: {:#?}", + distribution, + metrics_accumulator + ); + } + } + } + + let query_data: Vec = metrics_accumulator + .into_iter() + .map(|(id, val)| MetricsBucketResponse { + values: val.collect(), + dimensions: id, + }) + .collect(); + + Ok(MetricsResponse { + query_data, + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + }) +} + +pub async fn get_filters( + pool: &AnalyticsProvider, + req: GetPaymentFiltersRequest, + merchant_id: &String, +) -> 
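`get_metrics` fans out one task per requested metric on a `tokio::task::JoinSet` and drains completions as they arrive, so a slow metric never serializes the others. The spawn/drain skeleton, with strings standing in for the metric queries:

```rust
use tokio::task::JoinSet;

#[tokio::main]
async fn main() {
    let mut set = JoinSet::new();
    for metric in ["payment_count", "payment_success_rate", "avg_ticket_size"] {
        // Each metric gets its own task; owned data moves in, as with
        // `merchant_id_scoped` above.
        set.spawn(async move { (metric, format!("rows for {metric}")) });
    }

    // Results are drained in completion order, not spawn order.
    while let Some(joined) = set.join_next().await {
        let (metric, rows) = joined.expect("task panicked");
        println!("{metric}: {rows}");
    }
}
```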
AnalyticsResult { + let mut res = PaymentFiltersResponse::default(); + + for dim in req.group_by_names { + let values = match pool { + AnalyticsProvider::Sqlx(pool) => { + get_payment_filter_for_dimension(dim, merchant_id, &req.time_range, pool) + .await + } + AnalyticsProvider::Clickhouse(pool) => { + get_payment_filter_for_dimension(dim, merchant_id, &req.time_range, pool) + .await + } + AnalyticsProvider::CombinedCkh(sqlx_poll, ckh_pool) => { + let ckh_result = get_payment_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + ckh_pool, + ) + .await; + let sqlx_result = get_payment_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + sqlx_poll, + ) + .await; + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics filters") + }, + _ => {} + }; + ckh_result + } + AnalyticsProvider::CombinedSqlx(sqlx_poll, ckh_pool) => { + let ckh_result = get_payment_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + ckh_pool, + ) + .await; + let sqlx_result = get_payment_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + sqlx_poll, + ) + .await; + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics filters") + }, + _ => {} + }; + sqlx_result + } + } + .change_context(AnalyticsError::UnknownError)? + .into_iter() + .filter_map(|fil: FilterRow| match dim { + PaymentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), + PaymentDimensions::PaymentStatus => fil.status.map(|i| i.as_ref().to_string()), + PaymentDimensions::Connector => fil.connector, + PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()), + PaymentDimensions::PaymentMethod => fil.payment_method, + PaymentDimensions::PaymentMethodType => fil.payment_method_type, + }) + .collect::>(); + res.query_data.push(FilterValue { + dimension: dim, + values, + }) + } + Ok(res) +} diff --git a/crates/analytics/src/payments/distribution.rs b/crates/analytics/src/payments/distribution.rs new file mode 100644 index 000000000000..cf18c26310a7 --- /dev/null +++ b/crates/analytics/src/payments/distribution.rs @@ -0,0 +1,92 @@ +use api_models::analytics::{ + payments::{ + PaymentDimensions, PaymentDistributions, PaymentFilters, PaymentMetricsBucketIdentifier, + }, + Distribution, Granularity, TimeRange, +}; +use diesel_models::enums as storage_enums; +use time::PrimitiveDateTime; + +use crate::{ + query::{Aggregate, GroupByClause, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, +}; + +mod payment_error_message; + +use payment_error_message::PaymentErrorMessage; + +#[derive(Debug, PartialEq, Eq, serde::Deserialize)] +pub struct PaymentDistributionRow { + pub currency: Option>, + pub status: Option>, + pub connector: Option, + pub authentication_type: Option>, + pub payment_method: Option, + pub payment_method_type: Option, + pub total: Option, + pub count: Option, + pub error_message: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] + pub start_bucket: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] + pub end_bucket: Option, +} + +pub trait PaymentDistributionAnalytics: LoadRow {} + 
+#[async_trait::async_trait] +pub trait PaymentDistribution<T> +where + T: AnalyticsDataSource + PaymentDistributionAnalytics, +{ + #[allow(clippy::too_many_arguments)] + async fn load_distribution( + &self, + distribution: &Distribution, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option<Granularity>, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>>; +} + +#[async_trait::async_trait] +impl<T> PaymentDistribution<T> for PaymentDistributions +where + T: AnalyticsDataSource + PaymentDistributionAnalytics, + PrimitiveDateTime: ToSql<T>, + AnalyticsCollection: ToSql<T>, + Granularity: GroupByClause<T>, + Aggregate<&'static str>: ToSql<T>, + Window<&'static str>: ToSql<T>, +{ + async fn load_distribution( + &self, + distribution: &Distribution, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option<Granularity>, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> { + match self { + Self::PaymentErrorMessage => { + PaymentErrorMessage + .load_distribution( + distribution, + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + } +} diff --git a/crates/analytics/src/payments/distribution/payment_error_message.rs b/crates/analytics/src/payments/distribution/payment_error_message.rs new file mode 100644 index 000000000000..c70fc09aeac4 --- /dev/null +++ b/crates/analytics/src/payments/distribution/payment_error_message.rs @@ -0,0 +1,176 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Distribution, Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::{PaymentDistribution, PaymentDistributionRow}; +use crate::{ + query::{ + Aggregate, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentErrorMessage; + +#[async_trait::async_trait] +impl<T> PaymentDistribution<T> for PaymentErrorMessage +where + T: AnalyticsDataSource + super::PaymentDistributionAnalytics, + PrimitiveDateTime: ToSql<T>, + AnalyticsCollection: ToSql<T>, + Granularity: GroupByClause<T>, + Aggregate<&'static str>: ToSql<T>, + Window<&'static str>: ToSql<T>, +{ + async fn load_distribution( + &self, + distribution: &Distribution, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option<Granularity>, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> { + let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(&distribution.distribution_for) + .switch()?; + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; +
for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + query_builder + .add_group_by_clause(&distribution.distribution_for) + .attach_printable("Error grouping by distribution_for") + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .add_filter_clause( + PaymentDimensions::PaymentStatus, + storage_enums::AttemptStatus::Failure, + ) + .switch()?; + + for dim in dimensions.iter() { + query_builder.add_outer_select_column(dim).switch()?; + } + + query_builder + .add_outer_select_column(&distribution.distribution_for) + .switch()?; + query_builder.add_outer_select_column("count").switch()?; + query_builder + .add_outer_select_column("start_bucket") + .switch()?; + query_builder + .add_outer_select_column("end_bucket") + .switch()?; + let sql_dimensions = query_builder.transform_to_sql_values(dimensions).switch()?; + + query_builder + .add_outer_select_column(Window::Sum { + field: "count", + partition_by: Some(sql_dimensions), + order_by: None, + alias: Some("total"), + }) + .switch()?; + + query_builder + .add_top_n_clause( + dimensions, + distribution.distribution_cardinality.into(), + "count", + Order::Descending, + ) + .switch()?; + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + i.status.as_ref().map(|i| i.0), + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + i.payment_method_type.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/filters.rs b/crates/analytics/src/payments/filters.rs similarity index 87% rename from crates/router/src/analytics/payments/filters.rs rename to crates/analytics/src/payments/filters.rs index f009aaa76329..6c165f78a8e4 100644 --- a/crates/router/src/analytics/payments/filters.rs +++ b/crates/analytics/src/payments/filters.rs @@ -1,11 +1,11 @@ use api_models::analytics::{payments::PaymentDimensions, Granularity, TimeRange}; -use common_enums::enums::{AttemptStatus, AuthenticationType, Currency}; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums::{AttemptStatus, AuthenticationType, Currency}; use error_stack::ResultExt; use time::PrimitiveDateTime; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, types::{ AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult, LoadRow, @@ -26,6 +26,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); @@ -48,11 +49,12 @@ where 
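Assembled, the `PaymentErrorMessage` query above is: an inner per-dimension aggregation over failed attempts, a window `sum(count)` partitioned by the dimensions to get each group's total, and a top-N cut ordered by count. Roughly this SQL shape, written out by hand (illustrative only; the builder derives the dimension columns and the N at runtime):

```rust
// Illustrative target SQL for the error-message distribution; not the
// builder's literal output.
const ERROR_DISTRIBUTION_SHAPE: &str = r#"
SELECT
    connector,
    error_message,
    count,
    sum(count) OVER (PARTITION BY connector) AS total
FROM
(
    SELECT connector, error_message, count(*) AS count
    FROM payment_attempt_dist
    WHERE merchant_id = 'merchant_123' -- placeholder merchant
      AND status = 'failure'
    GROUP BY connector, error_message
)
ORDER BY count DESC
LIMIT 5 BY connector -- ClickHouse's top-N-per-group clause
"#;

fn main() {
    println!("{ERROR_DISTRIBUTION_SHAPE}");
}
```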
.change_context(FiltersError::QueryExecutionFailure) } -#[derive(Debug, serde::Serialize, Eq, PartialEq)] +#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)] pub struct FilterRow { pub currency: Option>, pub status: Option>, pub connector: Option, pub authentication_type: Option>, pub payment_method: Option, + pub payment_method_type: Option, } diff --git a/crates/router/src/analytics/payments/metrics.rs b/crates/analytics/src/payments/metrics.rs similarity index 76% rename from crates/router/src/analytics/payments/metrics.rs rename to crates/analytics/src/payments/metrics.rs index f492e5bd4df9..6fe6b6260d48 100644 --- a/crates/router/src/analytics/payments/metrics.rs +++ b/crates/analytics/src/payments/metrics.rs @@ -2,36 +2,44 @@ use api_models::analytics::{ payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, Granularity, TimeRange, }; -use common_enums::enums as storage_enums; +use diesel_models::enums as storage_enums; use time::PrimitiveDateTime; -use crate::analytics::{ - query::{Aggregate, GroupByClause, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, }; mod avg_ticket_size; +mod connector_success_rate; mod payment_count; mod payment_processed_amount; mod payment_success_count; +mod retries_count; mod success_rate; use avg_ticket_size::AvgTicketSize; +use connector_success_rate::ConnectorSuccessRate; use payment_count::PaymentCount; use payment_processed_amount::PaymentProcessedAmount; use payment_success_count::PaymentSuccessCount; use success_rate::PaymentSuccessRate; -#[derive(Debug, PartialEq, Eq)] +use self::retries_count::RetriesCount; + +#[derive(Debug, PartialEq, Eq, serde::Deserialize)] pub struct PaymentMetricRow { pub currency: Option>, pub status: Option>, pub connector: Option, pub authentication_type: Option>, pub payment_method: Option, + pub payment_method_type: Option, pub total: Option, pub count: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] pub start_bucket: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] pub end_bucket: Option, } @@ -61,6 +69,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -132,6 +141,30 @@ where ) .await } + Self::RetriesCount => { + RetriesCount + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::ConnectorSuccessRate => { + ConnectorSuccessRate + .load_metrics( + dimensions, + merchant_id, + filters, + granularity, + time_range, + pool, + ) + .await + } } } } diff --git a/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs b/crates/analytics/src/payments/metrics/avg_ticket_size.rs similarity index 90% rename from crates/router/src/analytics/payments/metrics/avg_ticket_size.rs rename to crates/analytics/src/payments/metrics/avg_ticket_size.rs index 2230d870e68a..9475d5288a64 100644 --- a/crates/router/src/analytics/payments/metrics/avg_ticket_size.rs +++ b/crates/analytics/src/payments/metrics/avg_ticket_size.rs @@ -3,12 +3,13 @@ use api_models::analytics::{ Granularity, TimeRange, }; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; use error_stack::ResultExt; use time::PrimitiveDateTime; use super::{PaymentMetric, PaymentMetricRow}; -use crate::analytics::{ - query::{Aggregate, 
GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -23,6 +24,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -89,6 +91,13 @@ where .switch()?; } + query_builder + .add_filter_clause( + PaymentDimensions::PaymentStatus, + storage_enums::AttemptStatus::Charged, + ) + .switch()?; + query_builder .execute_query::(pool) .await @@ -103,6 +112,7 @@ where i.connector.clone(), i.authentication_type.as_ref().map(|i| i.0), i.payment_method.clone(), + i.payment_method_type.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, @@ -119,7 +129,7 @@ where }) .collect::, - crate::analytics::query::PostProcessingError, + crate::query::PostProcessingError, >>() .change_context(MetricsError::PostProcessingFailure) } diff --git a/crates/analytics/src/payments/metrics/connector_success_rate.rs b/crates/analytics/src/payments/metrics/connector_success_rate.rs new file mode 100644 index 000000000000..0c4d19b2e0ba --- /dev/null +++ b/crates/analytics/src/payments/metrics/connector_success_rate.rs @@ -0,0 +1,130 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::PaymentMetricRow; +use crate::{ + query::{ + Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, + Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct ConnectorSuccessRate; + +#[async_trait::async_trait] +impl super::PaymentMetric for ConnectorSuccessRate +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[PaymentDimensions], + merchant_id: &str, + filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Payment); + let mut dimensions = dimensions.to_vec(); + + dimensions.push(PaymentDimensions::PaymentStatus); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + query_builder + .add_custom_filter_clause(PaymentDimensions::Connector, "NULL", FilterTypes::IsNotNull) + .switch()?; + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + 
.add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + i.payment_method_type.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/metrics/payment_count.rs b/crates/analytics/src/payments/metrics/payment_count.rs similarity index 94% rename from crates/router/src/analytics/payments/metrics/payment_count.rs rename to crates/analytics/src/payments/metrics/payment_count.rs index 661cec3dac36..34e71f3da6fb 100644 --- a/crates/router/src/analytics/payments/metrics/payment_count.rs +++ b/crates/analytics/src/payments/metrics/payment_count.rs @@ -7,8 +7,8 @@ use error_stack::ResultExt; use time::PrimitiveDateTime; use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -23,6 +23,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -97,6 +98,7 @@ where i.connector.clone(), i.authentication_type.as_ref().map(|i| i.0), i.payment_method.clone(), + i.payment_method_type.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, @@ -111,7 +113,7 @@ where i, )) }) - .collect::, crate::analytics::query::PostProcessingError>>() + .collect::, crate::query::PostProcessingError>>() .change_context(MetricsError::PostProcessingFailure) } } diff --git a/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs b/crates/analytics/src/payments/metrics/payment_processed_amount.rs similarity index 94% rename from crates/router/src/analytics/payments/metrics/payment_processed_amount.rs rename to crates/analytics/src/payments/metrics/payment_processed_amount.rs index 2ec0c6f18f9c..f2dbf97e0db9 100644 --- a/crates/router/src/analytics/payments/metrics/payment_processed_amount.rs +++ b/crates/analytics/src/payments/metrics/payment_processed_amount.rs @@ -2,14 +2,14 @@ use api_models::analytics::{ payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, Granularity, TimeRange, }; -use common_enums::enums as storage_enums; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; use error_stack::ResultExt; use time::PrimitiveDateTime; use super::PaymentMetricRow; -use crate::analytics::{ - 
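`ConnectorSuccessRate` deliberately appends `PaymentStatus` to the requested dimensions: it returns per-status counts and leaves the rate itself to the accumulator, which divides the charged count by the total across all statuses. That division in isolation (assuming, as in the accumulator, that `charged` is the sole success status):

```rust
/// Success rate from per-status counts: charged attempts over all
/// attempts, as a percentage.
fn connector_success_rate(buckets: &[(&str, u64)]) -> Option<f64> {
    let total: u64 = buckets.iter().map(|(_, n)| n).sum();
    if total == 0 {
        return None; // no attempts for this connector in the window
    }
    let charged: u64 = buckets
        .iter()
        .filter(|(status, _)| *status == "charged")
        .map(|(_, n)| n)
        .sum();
    Some(charged as f64 * 100.0 / total as f64)
}

fn main() {
    let per_status = [("charged", 90), ("failure", 9), ("authorized", 1)];
    assert_eq!(connector_success_rate(&per_status), Some(90.0));
    assert_eq!(connector_success_rate(&[]), None);
}
```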
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -24,6 +24,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -105,6 +106,7 @@ where i.connector.clone(), i.authentication_type.as_ref().map(|i| i.0), i.payment_method.clone(), + i.payment_method_type.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, @@ -121,7 +123,7 @@ where }) .collect::, - crate::analytics::query::PostProcessingError, + crate::query::PostProcessingError, >>() .change_context(MetricsError::PostProcessingFailure) } diff --git a/crates/router/src/analytics/payments/metrics/payment_success_count.rs b/crates/analytics/src/payments/metrics/payment_success_count.rs similarity index 94% rename from crates/router/src/analytics/payments/metrics/payment_success_count.rs rename to crates/analytics/src/payments/metrics/payment_success_count.rs index 8245fe7aeb88..a6fb8ed2239d 100644 --- a/crates/router/src/analytics/payments/metrics/payment_success_count.rs +++ b/crates/analytics/src/payments/metrics/payment_success_count.rs @@ -2,14 +2,14 @@ use api_models::analytics::{ payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, Granularity, TimeRange, }; -use common_enums::enums as storage_enums; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; use error_stack::ResultExt; use time::PrimitiveDateTime; use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -24,6 +24,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -104,6 +105,7 @@ where i.connector.clone(), i.authentication_type.as_ref().map(|i| i.0), i.payment_method.clone(), + i.payment_method_type.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, @@ -120,7 +122,7 @@ where }) .collect::, - crate::analytics::query::PostProcessingError, + crate::query::PostProcessingError, >>() .change_context(MetricsError::PostProcessingFailure) } diff --git a/crates/analytics/src/payments/metrics/retries_count.rs b/crates/analytics/src/payments/metrics/retries_count.rs new file mode 100644 index 000000000000..91952adb569a --- /dev/null +++ b/crates/analytics/src/payments/metrics/retries_count.rs @@ -0,0 +1,122 @@ +use api_models::analytics::{ + payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::PaymentMetricRow; +use crate::{ + query::{ + Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, + Window, + }, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct RetriesCount; + +#[async_trait::async_trait] +impl 
super::PaymentMetric for RetriesCount +where + T: AnalyticsDataSource + super::PaymentMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + _dimensions: &[PaymentDimensions], + merchant_id: &str, + _filters: &PaymentFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::PaymentIntent); + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Sum { + field: "amount", + alias: Some("total"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Min { + field: "created_at", + alias: Some("start_bucket"), + }) + .switch()?; + query_builder + .add_select_column(Aggregate::Max { + field: "created_at", + alias: Some("end_bucket"), + }) + .switch()?; + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + query_builder + .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt) + .switch()?; + query_builder + .add_custom_filter_clause("status", "succeeded", FilterTypes::Equal) + .switch()?; + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + granularity + .set_group_by_clause(&mut query_builder) + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + PaymentMetricsBucketIdentifier::new( + i.currency.as_ref().map(|i| i.0), + None, + i.connector.clone(), + i.authentication_type.as_ref().map(|i| i.0), + i.payment_method.clone(), + i.payment_method_type.clone(), + TimeRange { + start_time: match (granularity, i.start_bucket) { + (Some(g), Some(st)) => g.clip_to_start(st)?, + _ => time_range.start_time, + }, + end_time: granularity.as_ref().map_or_else( + || Ok(time_range.end_time), + |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(), + )?, + }, + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/router/src/analytics/payments/metrics/success_rate.rs b/crates/analytics/src/payments/metrics/success_rate.rs similarity index 95% rename from crates/router/src/analytics/payments/metrics/success_rate.rs rename to crates/analytics/src/payments/metrics/success_rate.rs index c63956d4b157..9e688240ddbf 100644 --- a/crates/router/src/analytics/payments/metrics/success_rate.rs +++ b/crates/analytics/src/payments/metrics/success_rate.rs @@ -7,8 +7,8 @@ use error_stack::ResultExt; use time::PrimitiveDateTime; use super::PaymentMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -23,6 +23,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -100,6 +101,7 @@ where i.connector.clone(), 
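// Reading the query above: RetriesCount works off the payment-intent
// collection and keeps only intents with attempt_count > 1 that ended up
// succeeded, i.e. payments that needed at least one retry. Schematically
// (table name assumed for illustration):
//
//   SELECT count(*) AS count, sum(amount) AS total,
//          min(created_at) AS start_bucket, max(created_at) AS end_bucket
//   FROM payment_intent
//   WHERE merchant_id = '...' AND attempt_count > 1 AND status = 'succeeded'
//
// Dimensions and filters are deliberately ignored (note the underscored
// parameters), so the result collapses to a single bucket per time slice.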
i.authentication_type.as_ref().map(|i| i.0), i.payment_method.clone(), + i.payment_method_type.clone(), TimeRange { start_time: match (granularity, i.start_bucket) { (Some(g), Some(st)) => g.clip_to_start(st)?, @@ -116,7 +118,7 @@ where }) .collect::, - crate::analytics::query::PostProcessingError, + crate::query::PostProcessingError, >>() .change_context(MetricsError::PostProcessingFailure) } diff --git a/crates/router/src/analytics/payments/types.rs b/crates/analytics/src/payments/types.rs similarity index 82% rename from crates/router/src/analytics/payments/types.rs rename to crates/analytics/src/payments/types.rs index fdfbedef383d..d5d8eca13e58 100644 --- a/crates/router/src/analytics/payments/types.rs +++ b/crates/analytics/src/payments/types.rs @@ -1,7 +1,7 @@ use api_models::analytics::payments::{PaymentDimensions, PaymentFilters}; use error_stack::ResultExt; -use crate::analytics::{ +use crate::{ query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, types::{AnalyticsCollection, AnalyticsDataSource}, }; @@ -41,6 +41,15 @@ where .add_filter_in_range_clause(PaymentDimensions::PaymentMethod, &self.payment_method) .attach_printable("Error adding payment method filter")?; } + + if !self.payment_method_type.is_empty() { + builder + .add_filter_in_range_clause( + PaymentDimensions::PaymentMethodType, + &self.payment_method_type, + ) + .attach_printable("Error adding payment method filter")?; + } Ok(()) } } diff --git a/crates/router/src/analytics/query.rs b/crates/analytics/src/query.rs similarity index 65% rename from crates/router/src/analytics/query.rs rename to crates/analytics/src/query.rs index b1f621d8153d..b924987f004c 100644 --- a/crates/router/src/analytics/query.rs +++ b/crates/analytics/src/query.rs @@ -1,26 +1,26 @@ -#![allow(dead_code)] use std::marker::PhantomData; use api_models::{ analytics::{ self as analytics_api, - payments::PaymentDimensions, + api_event::ApiEventDimensions, + payments::{PaymentDimensions, PaymentDistributions}, refunds::{RefundDimensions, RefundType}, + sdk_events::{SdkEventDimensions, SdkEventNames}, Granularity, }, - enums::Connector, + enums::{ + AttemptStatus, AuthenticationType, Connector, Currency, PaymentMethod, PaymentMethodType, + }, refunds::RefundStatus, }; -use common_enums::{ - enums as storage_enums, - enums::{AttemptStatus, AuthenticationType, Currency, PaymentMethod}, -}; use common_utils::errors::{CustomResult, ParsingError}; +use diesel_models::enums as storage_enums; use error_stack::{IntoReport, ResultExt}; -use router_env::logger; +use router_env::{logger, Flow}; -use super::types::{AnalyticsCollection, AnalyticsDataSource, LoadRow}; -use crate::analytics::types::QueryExecutionError; +use super::types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, TableEngine}; +use crate::types::QueryExecutionError; pub type QueryResult = error_stack::Result; pub trait QueryFilter where @@ -89,12 +89,12 @@ impl GroupByClause for Granularity { let granularity_divisor = self.get_bucket_size(); builder - .add_group_by_clause(format!("DATE_TRUNC('{trunc_scale}', modified_at)")) + .add_group_by_clause(format!("DATE_TRUNC('{trunc_scale}', created_at)")) .attach_printable("Error adding time prune group by")?; if let Some(scale) = granularity_bucket_scale { builder .add_group_by_clause(format!( - "FLOOR(DATE_PART('{scale}', modified_at)/{granularity_divisor})" + "FLOOR(DATE_PART('{scale}', created_at)/{granularity_divisor})" )) .attach_printable("Error adding time binning group by")?; } @@ -102,6 +102,26 @@ impl GroupByClause for Granularity { } 
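// Two group-by strategies now coexist: the Postgres impl above buckets time
// with DATE_TRUNC/DATE_PART (now on created_at rather than modified_at), while
// the ClickHouse impl below uses the native toStartOf* helpers. For a
// five-minute granularity the two emit roughly (illustrative only):
//
//   -- Postgres
//   GROUP BY DATE_TRUNC('hour', created_at),
//            FLOOR(DATE_PART('minute', created_at) / 5)
//   -- ClickHouse
//   GROUP BY toStartOfFiveMinutes(created_at)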
}
+impl GroupByClause<super::clickhouse::ClickhouseClient> for Granularity {
+    fn set_group_by_clause(
+        &self,
+        builder: &mut QueryBuilder<super::clickhouse::ClickhouseClient>,
+    ) -> QueryResult<()> {
+        let interval = match self {
+            Self::OneMin => "toStartOfMinute(created_at)",
+            Self::FiveMin => "toStartOfFiveMinutes(created_at)",
+            Self::FifteenMin => "toStartOfFifteenMinutes(created_at)",
+            Self::ThirtyMin => "toStartOfInterval(created_at, INTERVAL 30 minute)",
+            Self::OneHour => "toStartOfHour(created_at)",
+            Self::OneDay => "toStartOfDay(created_at)",
+        };
+
+        builder
+            .add_group_by_clause(interval)
+            .attach_printable("Error adding interval group by")
+    }
+}
+
 #[derive(strum::Display)]
 #[strum(serialize_all = "lowercase")]
 pub enum TimeGranularityLevel {
@@ -229,6 +249,76 @@ pub enum Aggregate {
     },
 }
 
+// Window functions in query
+// ---
+// Description -
+// field: to_sql type value used as expr in aggregation
+// partition_by: partition by fields in window
+// order_by: order by fields and order (Ascending / Descending) in window
+// alias: alias of window expr in query
+// ---
+// Usage -
+// Window::Sum {
+//     field: "count",
+//     partition_by: Some(query_builder.transform_to_sql_values(&dimensions).switch()?),
+//     order_by: Some(("value", Descending)),
+//     alias: Some("total"),
+// }
+#[derive(Debug)]
+pub enum Window<R> {
+    Sum {
+        field: R,
+        partition_by: Option<String>,
+        order_by: Option<(String, Order)>,
+        alias: Option<&'static str>,
+    },
+    RowNumber {
+        field: R,
+        partition_by: Option<String>,
+        order_by: Option<(String, Order)>,
+        alias: Option<&'static str>,
+    },
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum Order {
+    Ascending,
+    Descending,
+}
+
+impl ToString for Order {
+    fn to_string(&self) -> String {
+        String::from(match self {
+            Self::Ascending => "asc",
+            Self::Descending => "desc",
+        })
+    }
+}
+
+// Select TopN values for a group based on a metric
+// ---
+// Description -
+// columns: Columns in group to select TopN values for
+// count: N in TopN
+// order_column: metric used to sort and limit TopN
+// order: sort order of metric (Ascending / Descending)
+// ---
+// Usage -
+// Use via add_top_n_clause fn of query_builder
+// add_top_n_clause(
+//     &dimensions,
+//     distribution.distribution_cardinality.into(),
+//     "count",
+//     Order::Descending,
+// )
+#[derive(Debug)]
+pub struct TopN {
+    pub columns: String,
+    pub count: u64,
+    pub order_column: String,
+    pub order: Order,
+}
+
 #[derive(Debug)]
 pub struct QueryBuilder<T>
 where
@@ -239,13 +329,16 @@ where
     filters: Vec<(String, FilterTypes, String)>,
     group_by: Vec<String>,
     having: Option<Vec<(String, FilterTypes, String)>>,
+    outer_select: Vec<String>,
+    top_n: Option<TopN>,
     table: AnalyticsCollection,
     distinct: bool,
     db_type: PhantomData<T>,
+    table_engine: TableEngine,
 }
 
 pub trait ToSql<T: AnalyticsDataSource> {
-    fn to_sql(&self) -> error_stack::Result<String, ParsingError>;
+    fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError>;
 }
 
 /// Implement `ToSql` on arrays of types that impl `ToString`.
@@ -253,7 +346,7 @@ macro_rules!
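// The ToSql impl that serializes Window lives elsewhere in this file and is
// not part of this hunk, but the RowNumber window that add_top_n_clause builds
// would be expected to render along these lines (assumed output, shown only
// for orientation):
//
//   row_number() OVER (PARTITION BY connector, payment_method
//                      ORDER BY count desc) AS top_n
//
// build_query's outer-select / top_n wrapping below then filters on that alias.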
impl_to_sql_for_to_string { ($($type:ty),+) => { $( impl ToSql for $type { - fn to_sql(&self) -> error_stack::Result { + fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result { Ok(self.to_string()) } } @@ -267,8 +360,10 @@ impl_to_sql_for_to_string!( &PaymentDimensions, &RefundDimensions, PaymentDimensions, + &PaymentDistributions, RefundDimensions, PaymentMethod, + PaymentMethodType, AuthenticationType, Connector, AttemptStatus, @@ -276,12 +371,18 @@ impl_to_sql_for_to_string!( storage_enums::RefundStatus, Currency, RefundType, + Flow, &String, &bool, - &u64 + &u64, + u64, + Order ); -#[allow(dead_code)] +impl_to_sql_for_to_string!(&SdkEventDimensions, SdkEventDimensions, SdkEventNames); + +impl_to_sql_for_to_string!(&ApiEventDimensions, ApiEventDimensions); + #[derive(Debug)] pub enum FilterTypes { Equal, @@ -290,6 +391,23 @@ pub enum FilterTypes { Gte, Lte, Gt, + Like, + NotLike, + IsNotNull, +} + +pub fn filter_type_to_sql(l: &String, op: &FilterTypes, r: &String) -> String { + match op { + FilterTypes::EqualBool => format!("{l} = {r}"), + FilterTypes::Equal => format!("{l} = '{r}'"), + FilterTypes::In => format!("{l} IN ({r})"), + FilterTypes::Gte => format!("{l} >= '{r}'"), + FilterTypes::Gt => format!("{l} > {r}"), + FilterTypes::Lte => format!("{l} <= '{r}'"), + FilterTypes::Like => format!("{l} LIKE '%{r}%'"), + FilterTypes::NotLike => format!("{l} NOT LIKE '%{r}%'"), + FilterTypes::IsNotNull => format!("{l} IS NOT NULL"), + } } impl QueryBuilder @@ -303,22 +421,68 @@ where filters: Default::default(), group_by: Default::default(), having: Default::default(), + outer_select: Default::default(), + top_n: Default::default(), table, distinct: Default::default(), db_type: Default::default(), + table_engine: T::get_table_engine(table), } } pub fn add_select_column(&mut self, column: impl ToSql) -> QueryResult<()> { self.columns.push( column - .to_sql() + .to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing select column")?, ); Ok(()) } + pub fn transform_to_sql_values(&mut self, values: &[impl ToSql]) -> QueryResult { + let res = values + .iter() + .map(|i| i.to_sql(&self.table_engine)) + .collect::, ParsingError>>() + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing range filter value")? 
+ .join(", "); + Ok(res) + } + + pub fn add_top_n_clause( + &mut self, + columns: &[impl ToSql], + count: u64, + order_column: impl ToSql, + order: Order, + ) -> QueryResult<()> + where + Window<&'static str>: ToSql, + { + let partition_by_columns = self.transform_to_sql_values(columns)?; + let order_by_column = order_column + .to_sql(&self.table_engine) + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing select column")?; + + self.add_outer_select_column(Window::RowNumber { + field: "", + partition_by: Some(partition_by_columns.clone()), + order_by: Some((order_by_column.clone(), order)), + alias: Some("top_n"), + })?; + + self.top_n = Some(TopN { + columns: partition_by_columns, + count, + order_column: order_by_column, + order, + }); + Ok(()) + } + pub fn set_distinct(&mut self) { self.distinct = true } @@ -346,11 +510,11 @@ where comparison: FilterTypes, ) -> QueryResult<()> { self.filters.push(( - lhs.to_sql() + lhs.to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing filter key")?, comparison, - rhs.to_sql() + rhs.to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing filter value")?, )); @@ -366,7 +530,7 @@ where .iter() .map(|i| { // trimming whitespaces from the filter values received in request, to prevent a possibility of an SQL injection - i.to_sql().map(|s| { + i.to_sql(&self.table_engine).map(|s| { let trimmed_str = s.replace(' ', ""); format!("'{trimmed_str}'") }) @@ -381,7 +545,7 @@ where pub fn add_group_by_clause(&mut self, column: impl ToSql) -> QueryResult<()> { self.group_by.push( column - .to_sql() + .to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing group by field")?, ); @@ -406,14 +570,7 @@ where fn get_filter_clause(&self) -> String { self.filters .iter() - .map(|(l, op, r)| match op { - FilterTypes::EqualBool => format!("{l} = {r}"), - FilterTypes::Equal => format!("{l} = '{r}'"), - FilterTypes::In => format!("{l} IN ({r})"), - FilterTypes::Gte => format!("{l} >= '{r}'"), - FilterTypes::Gt => format!("{l} > {r}"), - FilterTypes::Lte => format!("{l} <= '{r}'"), - }) + .map(|(l, op, r)| filter_type_to_sql(l, op, r)) .collect::>() .join(" AND ") } @@ -426,7 +583,10 @@ where self.group_by.join(", ") } - #[allow(dead_code)] + fn get_outer_select_clause(&self) -> String { + self.outer_select.join(", ") + } + pub fn add_having_clause( &mut self, aggregate: Aggregate, @@ -437,11 +597,11 @@ where Aggregate: ToSql, { let aggregate = aggregate - .to_sql() + .to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing having aggregate")?; let value = value - .to_sql() + .to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing having value")?; let entry = (aggregate, filter_type, value); @@ -453,16 +613,20 @@ where Ok(()) } + pub fn add_outer_select_column(&mut self, column: impl ToSql) -> QueryResult<()> { + self.outer_select.push( + column + .to_sql(&self.table_engine) + .change_context(QueryBuildingError::SqlSerializeError) + .attach_printable("Error serializing outer select column")?, + ); + Ok(()) + } + pub fn get_filter_type_clause(&self) -> Option { self.having.as_ref().map(|vec| { vec.iter() - .map(|(l, op, r)| match op { - FilterTypes::Equal | FilterTypes::EqualBool => format!("{l} = {r}"), - FilterTypes::In 
=> format!("{l} IN ({r})"), - FilterTypes::Gte => format!("{l} >= {r}"), - FilterTypes::Lte => format!("{l} < {r}"), - FilterTypes::Gt => format!("{l} > {r}"), - }) + .map(|(l, op, r)| filter_type_to_sql(l, op, r)) .collect::>() .join(" AND ") }) @@ -471,6 +635,7 @@ where pub fn build_query(&mut self) -> QueryResult where Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { if self.columns.is_empty() { Err(QueryBuildingError::InvalidQuery( @@ -491,7 +656,7 @@ where query.push_str( &self .table - .to_sql() + .to_sql(&self.table_engine) .change_context(QueryBuildingError::SqlSerializeError) .attach_printable("Error serializing table value")?, ); @@ -504,6 +669,16 @@ where if !self.group_by.is_empty() { query.push_str(" GROUP BY "); query.push_str(&self.get_group_by_clause()); + if let TableEngine::CollapsingMergeTree { sign } = self.table_engine { + self.add_having_clause( + Aggregate::Count { + field: Some(sign), + alias: None, + }, + FilterTypes::Gte, + "1", + )?; + } } if self.having.is_some() { @@ -512,6 +687,22 @@ where query.push_str(condition.as_str()); } } + + if !self.outer_select.is_empty() { + query.insert_str( + 0, + format!("SELECT {} FROM (", &self.get_outer_select_clause()).as_str(), + ); + query.push_str(") _"); + } + + if let Some(top_n) = &self.top_n { + query.insert_str(0, "SELECT * FROM ("); + query.push_str(format!(") _ WHERE top_n <= {}", top_n.count).as_str()); + } + + println!("{}", query); + Ok(query) } @@ -522,6 +713,7 @@ where where P: LoadRow, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { let query = self .build_query() diff --git a/crates/router/src/analytics/refunds.rs b/crates/analytics/src/refunds.rs similarity index 81% rename from crates/router/src/analytics/refunds.rs rename to crates/analytics/src/refunds.rs index a8b52effe76d..53481e232817 100644 --- a/crates/router/src/analytics/refunds.rs +++ b/crates/analytics/src/refunds.rs @@ -7,4 +7,4 @@ pub mod types; pub use accumulator::{RefundMetricAccumulator, RefundMetricsAccumulator}; pub trait RefundAnalytics: metrics::RefundMetricAnalytics {} -pub use self::core::get_metrics; +pub use self::core::{get_filters, get_metrics}; diff --git a/crates/router/src/analytics/refunds/accumulator.rs b/crates/analytics/src/refunds/accumulator.rs similarity index 98% rename from crates/router/src/analytics/refunds/accumulator.rs rename to crates/analytics/src/refunds/accumulator.rs index 3d0c0e659f6c..9c51defdcf91 100644 --- a/crates/router/src/analytics/refunds/accumulator.rs +++ b/crates/analytics/src/refunds/accumulator.rs @@ -1,5 +1,5 @@ use api_models::analytics::refunds::RefundMetricsBucketValue; -use common_enums::enums as storage_enums; +use diesel_models::enums as storage_enums; use super::metrics::RefundMetricRow; #[derive(Debug, Default)] @@ -15,13 +15,11 @@ pub struct SuccessRateAccumulator { pub success: i64, pub total: i64, } - #[derive(Debug, Default)] #[repr(transparent)] pub struct CountAccumulator { pub count: Option, } - #[derive(Debug, Default)] #[repr(transparent)] pub struct SumAccumulator { diff --git a/crates/analytics/src/refunds/core.rs b/crates/analytics/src/refunds/core.rs new file mode 100644 index 000000000000..25a1e228f567 --- /dev/null +++ b/crates/analytics/src/refunds/core.rs @@ -0,0 +1,203 @@ +#![allow(dead_code)] +use std::collections::HashMap; + +use api_models::analytics::{ + refunds::{ + RefundDimensions, RefundMetrics, RefundMetricsBucketIdentifier, RefundMetricsBucketResponse, + }, + AnalyticsMetadata, GetRefundFilterRequest, GetRefundMetricRequest, 
MetricsResponse, + RefundFilterValue, RefundFiltersResponse, +}; +use error_stack::{IntoReport, ResultExt}; +use router_env::{ + logger, + tracing::{self, Instrument}, +}; + +use super::{ + filters::{get_refund_filter_for_dimension, RefundFilterRow}, + RefundMetricsAccumulator, +}; +use crate::{ + errors::{AnalyticsError, AnalyticsResult}, + metrics, + refunds::RefundMetricAccumulator, + AnalyticsProvider, +}; + +pub async fn get_metrics( + pool: &AnalyticsProvider, + merchant_id: &String, + req: GetRefundMetricRequest, +) -> AnalyticsResult> { + let mut metrics_accumulator: HashMap = + HashMap::new(); + let mut set = tokio::task::JoinSet::new(); + for metric_type in req.metrics.iter().cloned() { + let req = req.clone(); + let pool = pool.clone(); + let task_span = tracing::debug_span!( + "analytics_refund_query", + refund_metric = metric_type.as_ref() + ); + // Currently JoinSet works with only static lifetime references even if the task pool does not outlive the given reference + // We can optimize away this clone once that is fixed + let merchant_id_scoped = merchant_id.to_owned(); + set.spawn( + async move { + let data = pool + .get_refund_metrics( + &metric_type, + &req.group_by_names.clone(), + &merchant_id_scoped, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + (metric_type, data) + } + .instrument(task_span), + ); + } + + while let Some((metric, data)) = set + .join_next() + .await + .transpose() + .into_report() + .change_context(AnalyticsError::UnknownError)? + { + let data = data?; + let attributes = &[ + metrics::request::add_attributes("metric_type", metric.to_string()), + metrics::request::add_attributes("source", pool.to_string()), + ]; + + let value = u64::try_from(data.len()); + if let Ok(val) = value { + metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); + logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); + } + + for (id, value) in data { + logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + RefundMetrics::RefundSuccessRate => metrics_builder + .refund_success_rate + .add_metrics_bucket(&value), + RefundMetrics::RefundCount => { + metrics_builder.refund_count.add_metrics_bucket(&value) + } + RefundMetrics::RefundSuccessCount => { + metrics_builder.refund_success.add_metrics_bucket(&value) + } + RefundMetrics::RefundProcessedAmount => { + metrics_builder.processed_amount.add_metrics_bucket(&value) + } + } + } + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + let query_data: Vec = metrics_accumulator + .into_iter() + .map(|(id, val)| RefundMetricsBucketResponse { + values: val.collect(), + dimensions: id, + }) + .collect(); + + Ok(MetricsResponse { + query_data, + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + }) +} + +pub async fn get_filters( + pool: &AnalyticsProvider, + req: GetRefundFilterRequest, + merchant_id: &String, +) -> AnalyticsResult { + let mut res = RefundFiltersResponse::default(); + for dim in req.group_by_names { + let values = match pool { + AnalyticsProvider::Sqlx(pool) => { + get_refund_filter_for_dimension(dim, merchant_id, &req.time_range, pool) + .await + } + AnalyticsProvider::Clickhouse(pool) => { + get_refund_filter_for_dimension(dim, merchant_id, &req.time_range, pool) + .await + } + 
AnalyticsProvider::CombinedCkh(sqlx_pool, ckh_pool) => { + let ckh_result = get_refund_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + ckh_pool, + ) + .await; + let sqlx_result = get_refund_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + sqlx_pool, + ) + .await; + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters") + }, + _ => {} + }; + ckh_result + } + AnalyticsProvider::CombinedSqlx(sqlx_pool, ckh_pool) => { + let ckh_result = get_refund_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + ckh_pool, + ) + .await; + let sqlx_result = get_refund_filter_for_dimension( + dim, + merchant_id, + &req.time_range, + sqlx_pool, + ) + .await; + match (&sqlx_result, &ckh_result) { + (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => { + router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters") + }, + _ => {} + }; + sqlx_result + } + } + .change_context(AnalyticsError::UnknownError)? + .into_iter() + .filter_map(|fil: RefundFilterRow| match dim { + RefundDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), + RefundDimensions::RefundStatus => fil.refund_status.map(|i| i.as_ref().to_string()), + RefundDimensions::Connector => fil.connector, + RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()), + }) + .collect::>(); + res.query_data.push(RefundFilterValue { + dimension: dim, + values, + }) + } + Ok(res) +} diff --git a/crates/router/src/analytics/refunds/filters.rs b/crates/analytics/src/refunds/filters.rs similarity index 90% rename from crates/router/src/analytics/refunds/filters.rs rename to crates/analytics/src/refunds/filters.rs index 6b45e9194fad..29375483eb9a 100644 --- a/crates/router/src/analytics/refunds/filters.rs +++ b/crates/analytics/src/refunds/filters.rs @@ -2,13 +2,13 @@ use api_models::analytics::{ refunds::{RefundDimensions, RefundType}, Granularity, TimeRange, }; -use common_enums::enums::{Currency, RefundStatus}; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums::{Currency, RefundStatus}; use error_stack::ResultExt; use time::PrimitiveDateTime; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, types::{ AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult, LoadRow, @@ -28,6 +28,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::Refund); @@ -49,8 +50,7 @@ where .change_context(FiltersError::QueryBuildingError)? 
.change_context(FiltersError::QueryExecutionFailure) } - -#[derive(Debug, serde::Serialize, Eq, PartialEq)] +#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)] pub struct RefundFilterRow { pub currency: Option>, pub refund_status: Option>, diff --git a/crates/router/src/analytics/refunds/metrics.rs b/crates/analytics/src/refunds/metrics.rs similarity index 91% rename from crates/router/src/analytics/refunds/metrics.rs rename to crates/analytics/src/refunds/metrics.rs index d4f509b4a1e3..10cd03546772 100644 --- a/crates/router/src/analytics/refunds/metrics.rs +++ b/crates/analytics/src/refunds/metrics.rs @@ -4,7 +4,7 @@ use api_models::analytics::{ }, Granularity, TimeRange, }; -use common_enums::enums as storage_enums; +use diesel_models::enums as storage_enums; use time::PrimitiveDateTime; mod refund_count; mod refund_processed_amount; @@ -15,12 +15,11 @@ use refund_processed_amount::RefundProcessedAmount; use refund_success_count::RefundSuccessCount; use refund_success_rate::RefundSuccessRate; -use crate::analytics::{ - query::{Aggregate, GroupByClause, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult}, }; - -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Eq, PartialEq, serde::Deserialize)] pub struct RefundMetricRow { pub currency: Option>, pub refund_status: Option>, @@ -28,7 +27,9 @@ pub struct RefundMetricRow { pub refund_type: Option>, pub total: Option, pub count: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] pub start_bucket: Option, + #[serde(with = "common_utils::custom_serde::iso8601::option")] pub end_bucket: Option, } @@ -42,6 +43,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -62,6 +64,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, diff --git a/crates/router/src/analytics/refunds/metrics/refund_count.rs b/crates/analytics/src/refunds/metrics/refund_count.rs similarity index 94% rename from crates/router/src/analytics/refunds/metrics/refund_count.rs rename to crates/analytics/src/refunds/metrics/refund_count.rs index 471327235073..cf3c7a509278 100644 --- a/crates/router/src/analytics/refunds/metrics/refund_count.rs +++ b/crates/analytics/src/refunds/metrics/refund_count.rs @@ -7,8 +7,8 @@ use error_stack::ResultExt; use time::PrimitiveDateTime; use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -23,6 +23,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -93,7 +94,7 @@ where Ok(( RefundMetricsBucketIdentifier::new( i.currency.as_ref().map(|i| i.0), - i.refund_status.as_ref().map(|i| i.0), + i.refund_status.as_ref().map(|i| i.0.to_string()), i.connector.clone(), i.refund_type.as_ref().map(|i| i.0.to_string()), TimeRange { @@ -110,7 +111,7 @@ where i, )) }) - .collect::, crate::analytics::query::PostProcessingError>>() + .collect::, crate::query::PostProcessingError>>() 
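// Two quiet but necessary changes in this refunds module: RefundFilterRow and
// RefundMetricRow now derive serde::Deserialize, presumably so the same row
// types can be decoded from ClickHouse responses (where timestamps arrive as
// ISO 8601 strings, hence the custom_serde::iso8601 attributes) and not only
// via sqlx row decoding; and the bucket identifier now receives the refund
// status as a plain string (i.0.to_string()) rather than the DBEnumWrapper,
// decoupling the identifier from the storage enum representation.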
.change_context(MetricsError::PostProcessingFailure) } } diff --git a/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs b/crates/analytics/src/refunds/metrics/refund_processed_amount.rs similarity index 95% rename from crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs rename to crates/analytics/src/refunds/metrics/refund_processed_amount.rs index c5f3a706aaef..661fca57b282 100644 --- a/crates/router/src/analytics/refunds/metrics/refund_processed_amount.rs +++ b/crates/analytics/src/refunds/metrics/refund_processed_amount.rs @@ -2,14 +2,14 @@ use api_models::analytics::{ refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, Granularity, TimeRange, }; -use common_enums::enums as storage_enums; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; use error_stack::ResultExt; use time::PrimitiveDateTime; use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; #[derive(Default)] @@ -23,6 +23,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -116,7 +117,7 @@ where i, )) }) - .collect::, crate::analytics::query::PostProcessingError>>() + .collect::, crate::query::PostProcessingError>>() .change_context(MetricsError::PostProcessingFailure) } } diff --git a/crates/router/src/analytics/refunds/metrics/refund_success_count.rs b/crates/analytics/src/refunds/metrics/refund_success_count.rs similarity index 95% rename from crates/router/src/analytics/refunds/metrics/refund_success_count.rs rename to crates/analytics/src/refunds/metrics/refund_success_count.rs index 0c8032908fd7..bc09d8b7ab64 100644 --- a/crates/router/src/analytics/refunds/metrics/refund_success_count.rs +++ b/crates/analytics/src/refunds/metrics/refund_success_count.rs @@ -2,14 +2,14 @@ use api_models::analytics::{ refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier}, Granularity, TimeRange, }; -use common_enums::enums as storage_enums; use common_utils::errors::ReportSwitchExt; +use diesel_models::enums as storage_enums; use error_stack::ResultExt; use time::PrimitiveDateTime; use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; @@ -24,6 +24,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -115,7 +116,7 @@ where }) .collect::, - crate::analytics::query::PostProcessingError, + crate::query::PostProcessingError, >>() .change_context(MetricsError::PostProcessingFailure) } diff --git a/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs b/crates/analytics/src/refunds/metrics/refund_success_rate.rs similarity index 96% rename from crates/router/src/analytics/refunds/metrics/refund_success_rate.rs rename to crates/analytics/src/refunds/metrics/refund_success_rate.rs index 42f9ccf8d3c0..29b73b885d8e 100644 --- 
a/crates/router/src/analytics/refunds/metrics/refund_success_rate.rs +++ b/crates/analytics/src/refunds/metrics/refund_success_rate.rs @@ -7,8 +7,8 @@ use error_stack::ResultExt; use time::PrimitiveDateTime; use super::RefundMetricRow; -use crate::analytics::{ - query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql}, +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window}, types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, }; #[derive(Default)] @@ -22,6 +22,7 @@ where AnalyticsCollection: ToSql, Granularity: GroupByClause, Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, { async fn load_metrics( &self, @@ -110,7 +111,7 @@ where }) .collect::, - crate::analytics::query::PostProcessingError, + crate::query::PostProcessingError, >>() .change_context(MetricsError::PostProcessingFailure) } diff --git a/crates/router/src/analytics/refunds/types.rs b/crates/analytics/src/refunds/types.rs similarity index 98% rename from crates/router/src/analytics/refunds/types.rs rename to crates/analytics/src/refunds/types.rs index fbfd69972671..d7d739e1aba7 100644 --- a/crates/router/src/analytics/refunds/types.rs +++ b/crates/analytics/src/refunds/types.rs @@ -1,7 +1,7 @@ use api_models::analytics::refunds::{RefundDimensions, RefundFilters}; use error_stack::ResultExt; -use crate::analytics::{ +use crate::{ query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, types::{AnalyticsCollection, AnalyticsDataSource}, }; diff --git a/crates/analytics/src/sdk_events.rs b/crates/analytics/src/sdk_events.rs new file mode 100644 index 000000000000..fe8af7cfe2df --- /dev/null +++ b/crates/analytics/src/sdk_events.rs @@ -0,0 +1,14 @@ +pub mod accumulator; +mod core; +pub mod events; +pub mod filters; +pub mod metrics; +pub mod types; +pub use accumulator::{SdkEventMetricAccumulator, SdkEventMetricsAccumulator}; +pub trait SDKEventAnalytics: events::SdkEventsFilterAnalytics {} +pub trait SdkEventAnalytics: + metrics::SdkEventMetricAnalytics + filters::SdkEventFilterAnalytics +{ +} + +pub use self::core::{get_filters, get_metrics, sdk_events_core}; diff --git a/crates/analytics/src/sdk_events/accumulator.rs b/crates/analytics/src/sdk_events/accumulator.rs new file mode 100644 index 000000000000..ab9e9309434f --- /dev/null +++ b/crates/analytics/src/sdk_events/accumulator.rs @@ -0,0 +1,98 @@ +use api_models::analytics::sdk_events::SdkEventMetricsBucketValue; +use router_env::logger; + +use super::metrics::SdkEventMetricRow; + +#[derive(Debug, Default)] +pub struct SdkEventMetricsAccumulator { + pub payment_attempts: CountAccumulator, + pub payment_success: CountAccumulator, + pub payment_methods_call_count: CountAccumulator, + pub average_payment_time: AverageAccumulator, + pub sdk_initiated_count: CountAccumulator, + pub sdk_rendered_count: CountAccumulator, + pub payment_method_selected_count: CountAccumulator, + pub payment_data_filled_count: CountAccumulator, +} + +#[derive(Debug, Default)] +#[repr(transparent)] +pub struct CountAccumulator { + pub count: Option, +} + +#[derive(Debug, Default)] +pub struct AverageAccumulator { + pub total: u32, + pub count: u32, +} + +pub trait SdkEventMetricAccumulator { + type MetricOutput; + + fn add_metrics_bucket(&mut self, metrics: &SdkEventMetricRow); + + fn collect(self) -> Self::MetricOutput; +} + +impl SdkEventMetricAccumulator for CountAccumulator { + type MetricOutput = Option; + #[inline] + fn add_metrics_bucket(&mut self, metrics: &SdkEventMetricRow) { + 
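// Count merging below treats None as the identity and adds Some values, so
// partial buckets can be folded in any order:
//   (None, None) -> None; (None, Some(3)) -> Some(3); (Some(3), Some(4)) -> Some(7)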
self.count = match (self.count, metrics.count) { + (None, None) => None, + (None, i @ Some(_)) | (i @ Some(_), None) => i, + (Some(a), Some(b)) => Some(a + b), + } + } + #[inline] + fn collect(self) -> Self::MetricOutput { + self.count.and_then(|i| u64::try_from(i).ok()) + } +} + +impl SdkEventMetricAccumulator for AverageAccumulator { + type MetricOutput = Option; + + fn add_metrics_bucket(&mut self, metrics: &SdkEventMetricRow) { + let total = metrics + .total + .as_ref() + .and_then(bigdecimal::ToPrimitive::to_u32); + let count = metrics.count.and_then(|total| u32::try_from(total).ok()); + + match (total, count) { + (Some(total), Some(count)) => { + self.total += total; + self.count += count; + } + _ => { + logger::error!(message="Dropping metrics for average accumulator", metric=?metrics); + } + } + } + + fn collect(self) -> Self::MetricOutput { + if self.count == 0 { + None + } else { + Some(f64::from(self.total) / f64::from(self.count)) + } + } +} + +impl SdkEventMetricsAccumulator { + #[allow(dead_code)] + pub fn collect(self) -> SdkEventMetricsBucketValue { + SdkEventMetricsBucketValue { + payment_attempts: self.payment_attempts.collect(), + payment_success_count: self.payment_success.collect(), + payment_methods_call_count: self.payment_methods_call_count.collect(), + average_payment_time: self.average_payment_time.collect(), + sdk_initiated_count: self.sdk_initiated_count.collect(), + sdk_rendered_count: self.sdk_rendered_count.collect(), + payment_method_selected_count: self.payment_method_selected_count.collect(), + payment_data_filled_count: self.payment_data_filled_count.collect(), + } + } +} diff --git a/crates/analytics/src/sdk_events/core.rs b/crates/analytics/src/sdk_events/core.rs new file mode 100644 index 000000000000..34f23c745b05 --- /dev/null +++ b/crates/analytics/src/sdk_events/core.rs @@ -0,0 +1,201 @@ +use std::collections::HashMap; + +use api_models::analytics::{ + sdk_events::{ + MetricsBucketResponse, SdkEventMetrics, SdkEventMetricsBucketIdentifier, SdkEventsRequest, + }, + AnalyticsMetadata, GetSdkEventFiltersRequest, GetSdkEventMetricRequest, MetricsResponse, + SdkEventFiltersResponse, +}; +use error_stack::{IntoReport, ResultExt}; +use router_env::{instrument, logger, tracing}; + +use super::{ + events::{get_sdk_event, SdkEventsResult}, + SdkEventMetricsAccumulator, +}; +use crate::{ + errors::{AnalyticsError, AnalyticsResult}, + sdk_events::SdkEventMetricAccumulator, + types::FiltersError, + AnalyticsProvider, +}; + +#[instrument(skip_all)] +pub async fn sdk_events_core( + pool: &AnalyticsProvider, + req: SdkEventsRequest, + publishable_key: String, +) -> AnalyticsResult> { + match pool { + AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented) + .into_report() + .attach_printable("SQL Analytics is not implemented for Sdk Events"), + AnalyticsProvider::Clickhouse(pool) => get_sdk_event(&publishable_key, req, pool).await, + AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool) + | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => { + get_sdk_event(&publishable_key, req, ckh_pool).await + } + } + .change_context(AnalyticsError::UnknownError) +} + +#[instrument(skip_all)] +pub async fn get_metrics( + pool: &AnalyticsProvider, + publishable_key: Option<&String>, + req: GetSdkEventMetricRequest, +) -> AnalyticsResult> { + let mut metrics_accumulator: HashMap< + SdkEventMetricsBucketIdentifier, + SdkEventMetricsAccumulator, + > = HashMap::new(); + + if let Some(publishable_key) = publishable_key { + let mut set = tokio::task::JoinSet::new(); + 
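// Same fan-out shape as the refunds get_metrics above: one task per requested
// metric, joined back into one accumulator map. A minimal sketch of the
// pattern, with run_metric standing in for the provider call:
//
//   let mut set = tokio::task::JoinSet::new();
//   for metric_type in req.metrics.iter().cloned() {
//       let pool = pool.clone(); // the provider enum is cheap to clone
//       set.spawn(async move { (metric_type, run_metric(&pool).await) });
//   }
//   while let Some(res) = set.join_next().await {
//       let (metric, data) = res?; // task JoinError surfaces here
//       // ...fold data into the per-bucket accumulator map...
//   }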
for metric_type in req.metrics.iter().cloned() { + let req = req.clone(); + let publishable_key_scoped = publishable_key.to_owned(); + let pool = pool.clone(); + set.spawn(async move { + let data = pool + .get_sdk_event_metrics( + &metric_type, + &req.group_by_names.clone(), + &publishable_key_scoped, + &req.filters, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + (metric_type, data) + }); + } + + while let Some((metric, data)) = set + .join_next() + .await + .transpose() + .into_report() + .change_context(AnalyticsError::UnknownError)? + { + logger::info!("Logging Result {:?}", data); + for (id, value) in data? { + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + SdkEventMetrics::PaymentAttempts => { + metrics_builder.payment_attempts.add_metrics_bucket(&value) + } + SdkEventMetrics::PaymentSuccessCount => { + metrics_builder.payment_success.add_metrics_bucket(&value) + } + SdkEventMetrics::PaymentMethodsCallCount => metrics_builder + .payment_methods_call_count + .add_metrics_bucket(&value), + SdkEventMetrics::SdkRenderedCount => metrics_builder + .sdk_rendered_count + .add_metrics_bucket(&value), + SdkEventMetrics::SdkInitiatedCount => metrics_builder + .sdk_initiated_count + .add_metrics_bucket(&value), + SdkEventMetrics::PaymentMethodSelectedCount => metrics_builder + .payment_method_selected_count + .add_metrics_bucket(&value), + SdkEventMetrics::PaymentDataFilledCount => metrics_builder + .payment_data_filled_count + .add_metrics_bucket(&value), + SdkEventMetrics::AveragePaymentTime => metrics_builder + .average_payment_time + .add_metrics_bucket(&value), + } + } + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + + let query_data: Vec = metrics_accumulator + .into_iter() + .map(|(id, val)| MetricsBucketResponse { + values: val.collect(), + dimensions: id, + }) + .collect(); + + Ok(MetricsResponse { + query_data, + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + }) + } else { + logger::error!("Publishable key not present for merchant ID"); + Ok(MetricsResponse { + query_data: vec![], + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + }) + } +} + +#[allow(dead_code)] +pub async fn get_filters( + pool: &AnalyticsProvider, + req: GetSdkEventFiltersRequest, + publishable_key: Option<&String>, +) -> AnalyticsResult { + use api_models::analytics::{sdk_events::SdkEventDimensions, SdkEventFilterValue}; + + use super::filters::get_sdk_event_filter_for_dimension; + use crate::sdk_events::filters::SdkEventFilter; + + let mut res = SdkEventFiltersResponse::default(); + + if let Some(publishable_key) = publishable_key { + for dim in req.group_by_names { + let values = match pool { + AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented) + .into_report() + .attach_printable("SQL Analytics is not implemented for SDK Events"), + AnalyticsProvider::Clickhouse(pool) => { + get_sdk_event_filter_for_dimension(dim, publishable_key, &req.time_range, pool) + .await + } + AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool) + | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => { + get_sdk_event_filter_for_dimension( + dim, + publishable_key, + &req.time_range, + ckh_pool, + ) + .await + } + } + .change_context(AnalyticsError::UnknownError)? 
+ .into_iter() + .filter_map(|fil: SdkEventFilter| match dim { + SdkEventDimensions::PaymentMethod => fil.payment_method, + SdkEventDimensions::Platform => fil.platform, + SdkEventDimensions::BrowserName => fil.browser_name, + SdkEventDimensions::Source => fil.source, + SdkEventDimensions::Component => fil.component, + SdkEventDimensions::PaymentExperience => fil.payment_experience, + }) + .collect::>(); + res.query_data.push(SdkEventFilterValue { + dimension: dim, + values, + }) + } + } else { + router_env::logger::error!("Publishable key not found for merchant"); + } + + Ok(res) +} diff --git a/crates/analytics/src/sdk_events/events.rs b/crates/analytics/src/sdk_events/events.rs new file mode 100644 index 000000000000..a4d044267e94 --- /dev/null +++ b/crates/analytics/src/sdk_events/events.rs @@ -0,0 +1,80 @@ +use api_models::analytics::{ + sdk_events::{SdkEventNames, SdkEventsRequest}, + Granularity, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use strum::IntoEnumIterator; +use time::PrimitiveDateTime; + +use crate::{ + query::{Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow}, +}; +pub trait SdkEventsFilterAnalytics: LoadRow {} + +pub async fn get_sdk_event( + merchant_id: &str, + request: SdkEventsRequest, + pool: &T, +) -> FiltersResult> +where + T: AnalyticsDataSource + SdkEventsFilterAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + let static_event_list = SdkEventNames::iter() + .map(|i| format!("'{}'", i.as_ref())) + .collect::>() + .join(","); + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + query_builder.add_select_column("*").switch()?; + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + query_builder + .add_filter_clause("payment_id", request.payment_id) + .switch()?; + query_builder + .add_custom_filter_clause("event_name", static_event_list, FilterTypes::In) + .switch()?; + let _ = &request + .time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + //TODO!: update the execute_query function to return reports instead of plain errors... + query_builder + .execute_query::(pool) + .await + .change_context(FiltersError::QueryBuildingError)? 
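// The event-log fetch above is a plain, unaggregated SELECT; roughly (the
// table name is whatever AnalyticsCollection::SdkEvents serializes to, shown
// here as sdk_events for illustration):
//
//   SELECT * FROM sdk_events
//   WHERE merchant_id = '<publishable_key>'
//     AND payment_id = '<payment_id>'
//     AND event_name IN ('<SdkEventNames variants>')
//     AND created_at BETWEEN '<start>' AND '<end>'
//
// Note that the merchant_id parameter is actually fed the publishable key by
// sdk_events_core, since browser-side SDK events are keyed by that value.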
+ .change_context(FiltersError::QueryExecutionFailure) +} +#[derive(Debug, serde::Serialize, serde::Deserialize)] +pub struct SdkEventsResult { + pub merchant_id: String, + pub payment_id: String, + pub event_name: Option, + pub log_type: Option, + pub first_event: bool, + pub browser_name: Option, + pub browser_version: Option, + pub source: Option, + pub category: Option, + pub version: Option, + pub value: Option, + pub platform: Option, + pub component: Option, + pub payment_method: Option, + pub payment_experience: Option, + pub latency: Option, + #[serde(with = "common_utils::custom_serde::iso8601")] + pub created_at_precise: PrimitiveDateTime, + #[serde(with = "common_utils::custom_serde::iso8601")] + pub created_at: PrimitiveDateTime, +} diff --git a/crates/analytics/src/sdk_events/filters.rs b/crates/analytics/src/sdk_events/filters.rs new file mode 100644 index 000000000000..9963f51ef947 --- /dev/null +++ b/crates/analytics/src/sdk_events/filters.rs @@ -0,0 +1,56 @@ +use api_models::analytics::{sdk_events::SdkEventDimensions, Granularity, TimeRange}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow}, +}; + +pub trait SdkEventFilterAnalytics: LoadRow {} + +pub async fn get_sdk_event_filter_for_dimension( + dimension: SdkEventDimensions, + publishable_key: &String, + time_range: &TimeRange, + pool: &T, +) -> FiltersResult> +where + T: AnalyticsDataSource + SdkEventFilterAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + + query_builder.add_select_column(dimension).switch()?; + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder.set_distinct(); + + query_builder + .execute_query::(pool) + .await + .change_context(FiltersError::QueryBuildingError)? 
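// Filter discovery is just a DISTINCT projection over the requested dimension,
// e.g. for browser_name (illustrative rendering):
//
//   SELECT DISTINCT browser_name FROM sdk_events
//   WHERE merchant_id = '<publishable_key>'
//     AND created_at BETWEEN '<start>' AND '<end>'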
+ .change_context(FiltersError::QueryExecutionFailure) +} + +#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)] +pub struct SdkEventFilter { + pub payment_method: Option, + pub platform: Option, + pub browser_name: Option, + pub source: Option, + pub component: Option, + pub payment_experience: Option, +} diff --git a/crates/analytics/src/sdk_events/metrics.rs b/crates/analytics/src/sdk_events/metrics.rs new file mode 100644 index 000000000000..354d2270d18a --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics.rs @@ -0,0 +1,181 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier, + }, + Granularity, TimeRange, +}; +use time::PrimitiveDateTime; + +use crate::{ + query::{Aggregate, GroupByClause, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult}, +}; + +mod average_payment_time; +mod payment_attempts; +mod payment_data_filled_count; +mod payment_method_selected_count; +mod payment_methods_call_count; +mod payment_success_count; +mod sdk_initiated_count; +mod sdk_rendered_count; + +use average_payment_time::AveragePaymentTime; +use payment_attempts::PaymentAttempts; +use payment_data_filled_count::PaymentDataFilledCount; +use payment_method_selected_count::PaymentMethodSelectedCount; +use payment_methods_call_count::PaymentMethodsCallCount; +use payment_success_count::PaymentSuccessCount; +use sdk_initiated_count::SdkInitiatedCount; +use sdk_rendered_count::SdkRenderedCount; + +#[derive(Debug, PartialEq, Eq, serde::Deserialize)] +pub struct SdkEventMetricRow { + pub total: Option, + pub count: Option, + pub time_bucket: Option, + pub payment_method: Option, + pub platform: Option, + pub browser_name: Option, + pub source: Option, + pub component: Option, + pub payment_experience: Option, +} + +pub trait SdkEventMetricAnalytics: LoadRow {} + +#[async_trait::async_trait] +pub trait SdkEventMetric +where + T: AnalyticsDataSource + SdkEventMetricAnalytics, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult>; +} + +#[async_trait::async_trait] +impl SdkEventMetric for SdkEventMetrics +where + T: AnalyticsDataSource + SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + match self { + Self::PaymentAttempts => { + PaymentAttempts + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::PaymentSuccessCount => { + PaymentSuccessCount + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::PaymentMethodsCallCount => { + PaymentMethodsCallCount + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::SdkRenderedCount => { + SdkRenderedCount + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::SdkInitiatedCount => { + SdkInitiatedCount + .load_metrics( + dimensions, + publishable_key, + filters, + 
granularity, + time_range, + pool, + ) + .await + } + Self::PaymentMethodSelectedCount => { + PaymentMethodSelectedCount + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::PaymentDataFilledCount => { + PaymentDataFilledCount + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + Self::AveragePaymentTime => { + AveragePaymentTime + .load_metrics( + dimensions, + publishable_key, + filters, + granularity, + time_range, + pool, + ) + .await + } + } + } +} diff --git a/crates/analytics/src/sdk_events/metrics/average_payment_time.rs b/crates/analytics/src/sdk_events/metrics/average_payment_time.rs new file mode 100644 index 000000000000..db7171524ae5 --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/average_payment_time.rs @@ -0,0 +1,129 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct AveragePaymentTime; + +#[async_trait::async_trait] +impl super::SdkEventMetric for AveragePaymentTime +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + query_builder + .add_select_column(Aggregate::Sum { + field: "latency", + alias: Some("total"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::PaymentAttempt) + .switch()?; + + query_builder + .add_custom_filter_clause("latency", 0, FilterTypes::Gt) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? 
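// AveragePaymentTime deliberately returns sum(latency) and count separately
// (aliased total and count) instead of an SQL avg(): AverageAccumulator in
// accumulator.rs recomputes total / count after merging buckets, which keeps
// the mean correct across merges. Example: buckets (total 90, count 3) and
// (total 10, count 1) merge to 100 / 4 = 25, whereas averaging the two bucket
// means would wrongly give (30 + 10) / 2 = 20.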
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    SdkEventMetricsBucketIdentifier::new(
+                        i.payment_method.clone(),
+                        i.platform.clone(),
+                        i.browser_name.clone(),
+                        i.source.clone(),
+                        i.component.clone(),
+                        i.payment_experience.clone(),
+                        i.time_bucket.clone(),
+                    ),
+                    i,
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
diff --git a/crates/analytics/src/sdk_events/metrics/payment_attempts.rs b/crates/analytics/src/sdk_events/metrics/payment_attempts.rs
new file mode 100644
index 000000000000..b2a78188c4f2
--- /dev/null
+++ b/crates/analytics/src/sdk_events/metrics/payment_attempts.rs
@@ -0,0 +1,118 @@
+use api_models::analytics::{
+    sdk_events::{
+        SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
+    },
+    Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::SdkEventMetricRow;
+use crate::{
+    query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
+    types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct PaymentAttempts;
+
+#[async_trait::async_trait]
+impl<T> super::SdkEventMetric<T> for PaymentAttempts
+where
+    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        dimensions: &[SdkEventDimensions],
+        publishable_key: &str,
+        filters: &SdkEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
+        let dimensions = dimensions.to_vec();
+
+        for dim in dimensions.iter() {
+            query_builder.add_select_column(dim).switch()?;
+        }
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: None,
+                alias: Some("count"),
+            })
+            .switch()?;
+
+        if let Some(granularity) = granularity.as_ref() {
+            query_builder
+                .add_granularity_in_mins(granularity)
+                .switch()?;
+        }
+
+        filters.set_filter_clause(&mut query_builder).switch()?;
+
+        query_builder
+            .add_filter_clause("merchant_id", publishable_key)
+            .switch()?;
+
+        query_builder
+            .add_bool_filter_clause("first_event", 1)
+            .switch()?;
+
+        query_builder
+            .add_filter_clause("event_name", SdkEventNames::PaymentAttempt)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
+        for dim in dimensions.iter() {
+            query_builder
+                .add_group_by_clause(dim)
+                .attach_printable("Error grouping by dimensions")
+                .switch()?;
+        }
+
+        if let Some(_granularity) = granularity.as_ref() {
+            query_builder
+                .add_group_by_clause("time_bucket")
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        query_builder
+            .execute_query::<SdkEventMetricRow, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/metrics/payment_data_filled_count.rs b/crates/analytics/src/sdk_events/metrics/payment_data_filled_count.rs new file mode 100644 index 000000000000..a3c94baeda26 --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/payment_data_filled_count.rs @@ -0,0 +1,118 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentDataFilledCount; + +#[async_trait::async_trait] +impl super::SdkEventMetric for PaymentDataFilledCount +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::PaymentDataFilled) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/metrics/payment_method_selected_count.rs b/crates/analytics/src/sdk_events/metrics/payment_method_selected_count.rs new file mode 100644 index 000000000000..11aeac5e6ff9 --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/payment_method_selected_count.rs @@ -0,0 +1,118 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentMethodSelectedCount; + +#[async_trait::async_trait] +impl super::SdkEventMetric for PaymentMethodSelectedCount +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::PaymentMethodChanged) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/metrics/payment_methods_call_count.rs b/crates/analytics/src/sdk_events/metrics/payment_methods_call_count.rs new file mode 100644 index 000000000000..7570f1292e5e --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/payment_methods_call_count.rs @@ -0,0 +1,126 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentMethodsCallCount; + +#[async_trait::async_trait] +impl super::SdkEventMetric for PaymentMethodsCallCount +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::PaymentMethodsCall) + .switch()?; + + query_builder + .add_filter_clause("log_type", "INFO") + .switch()?; + + query_builder + .add_filter_clause("category", "API") + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/metrics/payment_success_count.rs b/crates/analytics/src/sdk_events/metrics/payment_success_count.rs new file mode 100644 index 000000000000..3faf8213632f --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/payment_success_count.rs @@ -0,0 +1,118 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct PaymentSuccessCount; + +#[async_trait::async_trait] +impl super::SdkEventMetric for PaymentSuccessCount +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::PaymentSuccess) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/metrics/sdk_initiated_count.rs b/crates/analytics/src/sdk_events/metrics/sdk_initiated_count.rs new file mode 100644 index 000000000000..a525e7890b75 --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/sdk_initiated_count.rs @@ -0,0 +1,118 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct SdkInitiatedCount; + +#[async_trait::async_trait] +impl super::SdkEventMetric for SdkInitiatedCount +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::StripeElementsCalled) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/metrics/sdk_rendered_count.rs b/crates/analytics/src/sdk_events/metrics/sdk_rendered_count.rs new file mode 100644 index 000000000000..ed9e776423a8 --- /dev/null +++ b/crates/analytics/src/sdk_events/metrics/sdk_rendered_count.rs @@ -0,0 +1,118 @@ +use api_models::analytics::{ + sdk_events::{ + SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames, + }, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::SdkEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct SdkRenderedCount; + +#[async_trait::async_trait] +impl super::SdkEventMetric for SdkRenderedCount +where + T: AnalyticsDataSource + super::SdkEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + dimensions: &[SdkEventDimensions], + publishable_key: &str, + filters: &SdkEventFilters, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + let dimensions = dimensions.to_vec(); + + for dim in dimensions.iter() { + query_builder.add_select_column(dim).switch()?; + } + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + filters.set_filter_clause(&mut query_builder).switch()?; + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::AppRendered) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + for dim in dimensions.iter() { + query_builder + .add_group_by_clause(dim) + .attach_printable("Error grouping by dimensions") + .switch()?; + } + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + SdkEventMetricsBucketIdentifier::new( + i.payment_method.clone(), + i.platform.clone(), + i.browser_name.clone(), + i.source.clone(), + i.component.clone(), + i.payment_experience.clone(), + i.time_bucket.clone(), + ), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/sdk_events/types.rs b/crates/analytics/src/sdk_events/types.rs new file mode 100644 index 000000000000..d631b3158ed4 --- /dev/null +++ b/crates/analytics/src/sdk_events/types.rs @@ -0,0 +1,50 @@ +use api_models::analytics::sdk_events::{SdkEventDimensions, SdkEventFilters}; +use error_stack::ResultExt; + +use crate::{ + query::{QueryBuilder, QueryFilter, QueryResult, ToSql}, + types::{AnalyticsCollection, AnalyticsDataSource}, +}; + +impl QueryFilter for SdkEventFilters +where + T: AnalyticsDataSource, + AnalyticsCollection: ToSql, +{ + fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> { + if !self.payment_method.is_empty() { + builder + .add_filter_in_range_clause(SdkEventDimensions::PaymentMethod, &self.payment_method) + .attach_printable("Error adding payment method filter")?; + } + if !self.platform.is_empty() { + builder + .add_filter_in_range_clause(SdkEventDimensions::Platform, &self.platform) + .attach_printable("Error adding platform filter")?; + } + if !self.browser_name.is_empty() { + builder + .add_filter_in_range_clause(SdkEventDimensions::BrowserName, &self.browser_name) + .attach_printable("Error adding browser name filter")?; + } + if !self.source.is_empty() { + builder + .add_filter_in_range_clause(SdkEventDimensions::Source, &self.source) + .attach_printable("Error adding source filter")?; + } + if !self.component.is_empty() { + builder + .add_filter_in_range_clause(SdkEventDimensions::Component, &self.component) + .attach_printable("Error adding component filter")?; + } + if !self.payment_experience.is_empty() { + builder + .add_filter_in_range_clause( + SdkEventDimensions::PaymentExperience, + &self.payment_experience, + ) + .attach_printable("Error adding payment experience filter")?; + } + Ok(()) + } +} diff --git a/crates/router/src/analytics/sqlx.rs b/crates/analytics/src/sqlx.rs similarity index 64% rename from crates/router/src/analytics/sqlx.rs rename to crates/analytics/src/sqlx.rs index b88a2065f0b0..cdd2647e4e71 100644 --- a/crates/router/src/analytics/sqlx.rs +++ b/crates/analytics/src/sqlx.rs @@ -1,14 +1,11 @@ use std::{fmt::Display, str::FromStr}; use api_models::analytics::refunds::RefundType; -use common_enums::enums::{ +use common_utils::errors::{CustomResult, ParsingError}; +use diesel_models::enums::{ AttemptStatus, AuthenticationType, Currency, PaymentMethod, RefundStatus, }; -use common_utils::errors::{CustomResult, ParsingError}; use error_stack::{IntoReport, ResultExt}; -#[cfg(feature = "kms")] -use external_services::{kms, kms::decrypt::KmsDecrypt}; -#[cfg(not(feature = "kms"))] use masking::PeekInterface; use sqlx::{ postgres::{PgArgumentBuffer, PgPoolOptions, PgRow, PgTypeInfo, PgValueRef}, @@ -16,15 +13,16 @@ use sqlx::{ Error::ColumnNotFound, FromRow, Pool, Postgres, Row, }; +use storage_impl::config::Database; use time::PrimitiveDateTime; use super::{ - query::{Aggregate, ToSql}, + query::{Aggregate, ToSql, Window}, types::{ AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, QueryExecutionError, + TableEngine, }, }; -use crate::configs::settings::Database; #[derive(Debug, Clone)] pub 
struct SqlxClient { @@ -47,19 +45,7 @@ impl Default for SqlxClient { } impl SqlxClient { - pub async fn from_conf( - conf: &Database, - #[cfg(feature = "kms")] kms_client: &kms::KmsClient, - ) -> Self { - #[cfg(feature = "kms")] - #[allow(clippy::expect_used)] - let password = conf - .password - .decrypt_inner(kms_client) - .await - .expect("Failed to KMS decrypt database password"); - - #[cfg(not(feature = "kms"))] + pub async fn from_conf(conf: &Database) -> Self { let password = &conf.password.peek(); let database_url = format!( "postgres://{}:{}@{}:{}/{}", @@ -154,6 +140,7 @@ where impl super::payments::filters::PaymentFilterAnalytics for SqlxClient {} impl super::payments::metrics::PaymentMetricAnalytics for SqlxClient {} +impl super::payments::distribution::PaymentDistributionAnalytics for SqlxClient {} impl super::refunds::metrics::RefundMetricAnalytics for SqlxClient {} impl super::refunds::filters::RefundFilterAnalytics for SqlxClient {} @@ -207,7 +194,7 @@ impl<'a> FromRow<'a, PgRow> for super::refunds::metrics::RefundMetricRow { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; - + // Removing millisecond precision to get accurate diffs against clickhouse let start_bucket: Option = row .try_get::, _>("start_bucket")? .and_then(|dt| dt.replace_millisecond(0).ok()); @@ -253,6 +240,11 @@ impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; + let payment_method_type: Option = + row.try_get("payment_method_type").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; let total: Option = row.try_get("total").or_else(|e| match e { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), @@ -261,7 +253,72 @@ impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; + // Removing millisecond precision to get accurate diffs against clickhouse + let start_bucket: Option = row + .try_get::, _>("start_bucket")? + .and_then(|dt| dt.replace_millisecond(0).ok()); + let end_bucket: Option = row + .try_get::, _>("end_bucket")? 
+ .and_then(|dt| dt.replace_millisecond(0).ok()); + Ok(Self { + currency, + status, + connector, + authentication_type, + payment_method, + payment_method_type, + total, + count, + start_bucket, + end_bucket, + }) + } +} +impl<'a> FromRow<'a, PgRow> for super::payments::distribution::PaymentDistributionRow { + fn from_row(row: &'a PgRow) -> sqlx::Result { + let currency: Option> = + row.try_get("currency").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let status: Option> = + row.try_get("status").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let connector: Option = row.try_get("connector").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let authentication_type: Option> = + row.try_get("authentication_type").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let payment_method: Option = + row.try_get("payment_method").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let payment_method_type: Option = + row.try_get("payment_method_type").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let total: Option = row.try_get("total").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let count: Option = row.try_get("count").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + let error_message: Option = row.try_get("error_message").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; + // Removing millisecond precision to get accurate diffs against clickhouse let start_bucket: Option = row .try_get::, _>("start_bucket")? 
.and_then(|dt| dt.replace_millisecond(0).ok()); @@ -274,8 +331,10 @@ impl<'a> FromRow<'a, PgRow> for super::payments::metrics::PaymentMetricRow { connector, authentication_type, payment_method, + payment_method_type, total, count, + error_message, start_bucket, end_bucket, }) @@ -308,12 +367,18 @@ impl<'a> FromRow<'a, PgRow> for super::payments::filters::FilterRow { ColumnNotFound(_) => Ok(Default::default()), e => Err(e), })?; + let payment_method_type: Option = + row.try_get("payment_method_type").or_else(|e| match e { + ColumnNotFound(_) => Ok(Default::default()), + e => Err(e), + })?; Ok(Self { currency, status, connector, authentication_type, payment_method, + payment_method_type, }) } } @@ -349,16 +414,21 @@ impl<'a> FromRow<'a, PgRow> for super::refunds::filters::RefundFilterRow { } impl ToSql for PrimitiveDateTime { - fn to_sql(&self) -> error_stack::Result { + fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result { Ok(self.to_string()) } } impl ToSql for AnalyticsCollection { - fn to_sql(&self) -> error_stack::Result { + fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result { match self { Self::Payment => Ok("payment_attempt".to_string()), Self::Refund => Ok("refund".to_string()), + Self::SdkEvents => Err(error_stack::report!(ParsingError::UnknownError) + .attach_printable("SdkEvents table is not implemented for Sqlx"))?, + Self::ApiEvents => Err(error_stack::report!(ParsingError::UnknownError) + .attach_printable("ApiEvents table is not implemented for Sqlx"))?, + Self::PaymentIntent => Ok("payment_intent".to_string()), } } } @@ -367,7 +437,7 @@ impl ToSql for Aggregate where T: ToSql, { - fn to_sql(&self) -> error_stack::Result { + fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result { Ok(match self { Self::Count { field: _, alias } => { format!( @@ -378,21 +448,86 @@ where Self::Sum { field, alias } => { format!( "sum({}){}", - field.to_sql().attach_printable("Failed to sum aggregate")?, + field + .to_sql(table_engine) + .attach_printable("Failed to sum aggregate")?, alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) ) } Self::Min { field, alias } => { format!( "min({}){}", - field.to_sql().attach_printable("Failed to min aggregate")?, + field + .to_sql(table_engine) + .attach_printable("Failed to min aggregate")?, alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) ) } Self::Max { field, alias } => { format!( "max({}){}", - field.to_sql().attach_printable("Failed to max aggregate")?, + field + .to_sql(table_engine) + .attach_printable("Failed to max aggregate")?, + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + }) + } +} + +impl ToSql for Window +where + T: ToSql, +{ + fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result { + Ok(match self { + Self::Sum { + field, + partition_by, + order_by, + alias, + } => { + format!( + "sum({}) over ({}{}){}", + field + .to_sql(table_engine) + .attach_printable("Failed to sum window")?, + partition_by.as_ref().map_or_else( + || "".to_owned(), + |partition_by| format!("partition by {}", partition_by.to_owned()) + ), + order_by.as_ref().map_or_else( + || "".to_owned(), + |(order_column, order)| format!( + " order by {} {}", + order_column.to_owned(), + order.to_string() + ) + ), + alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) + ) + } + Self::RowNumber { + field: _, + partition_by, + order_by, + alias, + } => { + format!( + "row_number() over ({}{}){}", + partition_by.as_ref().map_or_else( + 
|| "".to_owned(), + |partition_by| format!("partition by {}", partition_by.to_owned()) + ), + order_by.as_ref().map_or_else( + || "".to_owned(), + |(order_column, order)| format!( + " order by {} {}", + order_column.to_owned(), + order.to_string() + ) + ), alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias)) ) } diff --git a/crates/router/src/analytics/types.rs b/crates/analytics/src/types.rs similarity index 83% rename from crates/router/src/analytics/types.rs rename to crates/analytics/src/types.rs index fe20e812a9b8..16d342d3d2ee 100644 --- a/crates/router/src/analytics/types.rs +++ b/crates/analytics/src/types.rs @@ -2,25 +2,36 @@ use std::{fmt::Display, str::FromStr}; use common_utils::{ errors::{CustomResult, ErrorSwitch, ParsingError}, - events::ApiEventMetric, + events::{ApiEventMetric, ApiEventsType}, + impl_misc_api_event_type, }; use error_stack::{report, Report, ResultExt}; use super::query::QueryBuildingError; -#[derive(serde::Deserialize, Debug, masking::Serialize)] +#[derive(serde::Deserialize, Debug, serde::Serialize)] #[serde(rename_all = "snake_case")] pub enum AnalyticsDomain { Payments, Refunds, + SdkEvents, + ApiEvents, } -impl ApiEventMetric for AnalyticsDomain {} - #[derive(Debug, strum::AsRefStr, strum::Display, Clone, Copy)] pub enum AnalyticsCollection { Payment, Refund, + SdkEvents, + ApiEvents, + PaymentIntent, +} + +#[allow(dead_code)] +#[derive(Debug)] +pub enum TableEngine { + CollapsingMergeTree { sign: &'static str }, + BasicTree, } #[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)] @@ -50,6 +61,7 @@ where // Analytics Framework pub trait RefundAnalytics {} +pub trait SdkEventAnalytics {} #[async_trait::async_trait] pub trait AnalyticsDataSource @@ -60,6 +72,10 @@ where async fn load_results(&self, query: &str) -> CustomResult, QueryExecutionError> where Self: LoadRow; + + fn get_table_engine(_table: AnalyticsCollection) -> TableEngine { + TableEngine::BasicTree + } } pub trait LoadRow @@ -117,3 +133,5 @@ impl ErrorSwitch for QueryBuildingError { FiltersError::QueryBuildingError } } + +impl_misc_api_event_type!(AnalyticsDomain); diff --git a/crates/router/src/analytics/utils.rs b/crates/analytics/src/utils.rs similarity index 52% rename from crates/router/src/analytics/utils.rs rename to crates/analytics/src/utils.rs index f7e6ea69dc37..6a0aa973a1e7 100644 --- a/crates/router/src/analytics/utils.rs +++ b/crates/analytics/src/utils.rs @@ -1,6 +1,8 @@ use api_models::analytics::{ + api_event::{ApiEventDimensions, ApiEventMetrics}, payments::{PaymentDimensions, PaymentMetrics}, refunds::{RefundDimensions, RefundMetrics}, + sdk_events::{SdkEventDimensions, SdkEventMetrics}, NameDescription, }; use strum::IntoEnumIterator; @@ -13,6 +15,14 @@ pub fn get_refund_dimensions() -> Vec { RefundDimensions::iter().map(Into::into).collect() } +pub fn get_sdk_event_dimensions() -> Vec { + SdkEventDimensions::iter().map(Into::into).collect() +} + +pub fn get_api_event_dimensions() -> Vec { + ApiEventDimensions::iter().map(Into::into).collect() +} + pub fn get_payment_metrics_info() -> Vec { PaymentMetrics::iter().map(Into::into).collect() } @@ -20,3 +30,11 @@ pub fn get_payment_metrics_info() -> Vec { pub fn get_refund_metrics_info() -> Vec { RefundMetrics::iter().map(Into::into).collect() } + +pub fn get_sdk_event_metrics_info() -> Vec { + SdkEventMetrics::iter().map(Into::into).collect() +} + +pub fn get_api_event_metrics_info() -> Vec { + ApiEventMetrics::iter().map(Into::into).collect() +} diff --git a/crates/api_models/Cargo.toml 
b/crates/api_models/Cargo.toml index 73c2d673c972..cb2e243745de 100644 --- a/crates/api_models/Cargo.toml +++ b/crates/api_models/Cargo.toml @@ -14,7 +14,7 @@ connector_choice_bcompat = [] errors = ["dep:actix-web", "dep:reqwest"] backwards_compatibility = ["connector_choice_bcompat"] connector_choice_mca_id = ["euclid/connector_choice_mca_id"] -dummy_connector = ["euclid/dummy_connector"] +dummy_connector = ["euclid/dummy_connector", "common_enums/dummy_connector"] detailed_errors = [] payouts = [] diff --git a/crates/api_models/src/admin.rs b/crates/api_models/src/admin.rs index efde4a048323..6bb4fd4afa0f 100644 --- a/crates/api_models/src/admin.rs +++ b/crates/api_models/src/admin.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use common_utils::{ crypto::{Encryptable, OptionalEncryptableName}, pii, @@ -614,6 +616,36 @@ pub struct MerchantConnectorCreate { pub status: Option, } +// Different patterns of authentication. +#[derive(Default, Debug, Clone, serde::Deserialize, serde::Serialize)] +#[serde(tag = "auth_type")] +pub enum ConnectorAuthType { + TemporaryAuth, + HeaderKey { + api_key: Secret, + }, + BodyKey { + api_key: Secret, + key1: Secret, + }, + SignatureKey { + api_key: Secret, + key1: Secret, + api_secret: Secret, + }, + MultiAuthKey { + api_key: Secret, + key1: Secret, + api_secret: Secret, + key2: Secret, + }, + CurrencyAuthKey { + auth_key_map: HashMap, + }, + #[default] + NoKey, +} + #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] #[serde(deny_unknown_fields)] pub struct MerchantConnectorWebhookDetails { diff --git a/crates/api_models/src/analytics.rs b/crates/api_models/src/analytics.rs index 0358b6b313cf..0263427b0fde 100644 --- a/crates/api_models/src/analytics.rs +++ b/crates/api_models/src/analytics.rs @@ -1,15 +1,20 @@ use std::collections::HashSet; -use common_utils::events::ApiEventMetric; -use time::PrimitiveDateTime; +use common_utils::pii::EmailStrategy; +use masking::Secret; use self::{ - payments::{PaymentDimensions, PaymentMetrics}, + api_event::{ApiEventDimensions, ApiEventMetrics}, + payments::{PaymentDimensions, PaymentDistributions, PaymentMetrics}, refunds::{RefundDimensions, RefundMetrics}, + sdk_events::{SdkEventDimensions, SdkEventMetrics}, }; +pub use crate::payments::TimeRange; +pub mod api_event; pub mod payments; pub mod refunds; +pub mod sdk_events; #[derive(Debug, serde::Serialize)] pub struct NameDescription { @@ -25,23 +30,12 @@ pub struct GetInfoResponse { pub dimensions: Vec, } -impl ApiEventMetric for GetInfoResponse {} - -#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, PartialEq, Eq, Hash)] -#[serde(rename_all = "camelCase")] -pub struct TimeRange { - #[serde(with = "common_utils::custom_serde::iso8601")] - pub start_time: PrimitiveDateTime, - #[serde(default, with = "common_utils::custom_serde::iso8601::option")] - pub end_time: Option, -} - -#[derive(Clone, Copy, Debug, serde::Deserialize, masking::Serialize)] +#[derive(Clone, Copy, Debug, serde::Deserialize, serde::Serialize)] pub struct TimeSeries { pub granularity: Granularity, } -#[derive(Clone, Copy, Debug, serde::Deserialize, masking::Serialize)] +#[derive(Clone, Copy, Debug, serde::Deserialize, serde::Serialize)] pub enum Granularity { #[serde(rename = "G_ONEMIN")] OneMin, @@ -57,7 +51,7 @@ pub enum Granularity { OneDay, } -#[derive(Clone, Debug, serde::Deserialize, masking::Serialize)] +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] #[serde(rename_all = "camelCase")] pub struct GetPaymentMetricRequest { pub time_series: Option, @@ 
-67,13 +61,51 @@ pub struct GetPaymentMetricRequest { #[serde(default)] pub filters: payments::PaymentFilters, pub metrics: HashSet, + pub distribution: Option, #[serde(default)] pub delta: bool, } -impl ApiEventMetric for GetPaymentMetricRequest {} +#[derive(Clone, Copy, Debug, serde::Deserialize, serde::Serialize)] +pub enum QueryLimit { + #[serde(rename = "TOP_5")] + Top5, + #[serde(rename = "TOP_10")] + Top10, +} + +#[allow(clippy::from_over_into)] +impl Into for QueryLimit { + fn into(self) -> u64 { + match self { + Self::Top5 => 5, + Self::Top10 => 10, + } + } +} + +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Distribution { + pub distribution_for: PaymentDistributions, + pub distribution_cardinality: QueryLimit, +} + +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ReportRequest { + pub time_range: TimeRange, +} + +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GenerateReportRequest { + pub request: ReportRequest, + pub merchant_id: String, + pub email: Secret, +} -#[derive(Clone, Debug, serde::Deserialize, masking::Serialize)] +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] #[serde(rename_all = "camelCase")] pub struct GetRefundMetricRequest { pub time_series: Option, @@ -87,14 +119,26 @@ pub struct GetRefundMetricRequest { pub delta: bool, } -impl ApiEventMetric for GetRefundMetricRequest {} +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetSdkEventMetricRequest { + pub time_series: Option, + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, + #[serde(default)] + pub filters: sdk_events::SdkEventFilters, + pub metrics: HashSet, + #[serde(default)] + pub delta: bool, +} #[derive(Debug, serde::Serialize)] pub struct AnalyticsMetadata { pub current_time_range: TimeRange, } -#[derive(Debug, serde::Deserialize, masking::Serialize)] +#[derive(Debug, serde::Deserialize, serde::Serialize)] #[serde(rename_all = "camelCase")] pub struct GetPaymentFiltersRequest { pub time_range: TimeRange, @@ -102,16 +146,12 @@ pub struct GetPaymentFiltersRequest { pub group_by_names: Vec, } -impl ApiEventMetric for GetPaymentFiltersRequest {} - #[derive(Debug, Default, serde::Serialize)] #[serde(rename_all = "camelCase")] pub struct PaymentFiltersResponse { pub query_data: Vec, } -impl ApiEventMetric for PaymentFiltersResponse {} - #[derive(Debug, serde::Serialize)] #[serde(rename_all = "camelCase")] pub struct FilterValue { @@ -119,34 +159,88 @@ pub struct FilterValue { pub values: Vec, } -#[derive(Debug, serde::Deserialize, masking::Serialize)] +#[derive(Debug, serde::Deserialize, serde::Serialize)] #[serde(rename_all = "camelCase")] + pub struct GetRefundFilterRequest { pub time_range: TimeRange, #[serde(default)] pub group_by_names: Vec, } -impl ApiEventMetric for GetRefundFilterRequest {} - #[derive(Debug, Default, serde::Serialize, Eq, PartialEq)] #[serde(rename_all = "camelCase")] pub struct RefundFiltersResponse { pub query_data: Vec, } -impl ApiEventMetric for RefundFiltersResponse {} - #[derive(Debug, serde::Serialize, Eq, PartialEq)] #[serde(rename_all = "camelCase")] + pub struct RefundFilterValue { pub dimension: RefundDimensions, pub values: Vec, } +#[derive(Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetSdkEventFiltersRequest { + pub time_range: 
TimeRange, + #[serde(default)] + pub group_by_names: Vec, +} + +#[derive(Debug, Default, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SdkEventFiltersResponse { + pub query_data: Vec, +} + +#[derive(Debug, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SdkEventFilterValue { + pub dimension: SdkEventDimensions, + pub values: Vec, +} + #[derive(Debug, serde::Serialize)] #[serde(rename_all = "camelCase")] pub struct MetricsResponse { pub query_data: Vec, pub meta_data: [AnalyticsMetadata; 1], } + +#[derive(Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetApiEventFiltersRequest { + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, +} + +#[derive(Debug, Default, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ApiEventFiltersResponse { + pub query_data: Vec, +} + +#[derive(Debug, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ApiEventFilterValue { + pub dimension: ApiEventDimensions, + pub values: Vec, +} + +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct GetApiEventMetricRequest { + pub time_series: Option, + pub time_range: TimeRange, + #[serde(default)] + pub group_by_names: Vec, + #[serde(default)] + pub filters: api_event::ApiEventFilters, + pub metrics: HashSet, + #[serde(default)] + pub delta: bool, +} diff --git a/crates/api_models/src/analytics/api_event.rs b/crates/api_models/src/analytics/api_event.rs new file mode 100644 index 000000000000..62fe829f01b9 --- /dev/null +++ b/crates/api_models/src/analytics/api_event.rs @@ -0,0 +1,148 @@ +use std::{ + collections::hash_map::DefaultHasher, + hash::{Hash, Hasher}, +}; + +use super::{NameDescription, TimeRange}; +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +pub struct ApiLogsRequest { + #[serde(flatten)] + pub query_param: QueryType, + pub api_name_filter: Option>, +} + +pub enum FilterType { + ApiCountFilter, + LatencyFilter, + StatusCodeFilter, +} + +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] +#[serde(tag = "type")] +pub enum QueryType { + Payment { + payment_id: String, + }, + Refund { + payment_id: String, + refund_id: String, + }, +} + +#[derive( + Debug, + serde::Serialize, + serde::Deserialize, + strum::AsRefStr, + PartialEq, + PartialOrd, + Eq, + Ord, + strum::Display, + strum::EnumIter, + Clone, + Copy, +)] +#[serde(rename_all = "snake_case")] +#[strum(serialize_all = "snake_case")] +pub enum ApiEventDimensions { + // Do not change the order of these enums + // Consult the Dashboard FE folks since these also affects the order of metrics on FE + StatusCode, + FlowType, + ApiFlow, +} + +impl From for NameDescription { + fn from(value: ApiEventDimensions) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} +#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)] +pub struct ApiEventFilters { + pub status_code: Vec, + pub flow_type: Vec, + pub api_flow: Vec, +} + +#[derive( + Clone, + Debug, + Hash, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumIter, + strum::AsRefStr, +)] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] +pub enum ApiEventMetrics { + Latency, + ApiCount, + StatusCodeCount, +} + +impl From for NameDescription { + fn from(value: ApiEventMetrics) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} + +#[derive(Debug, 
serde::Serialize, Eq)] +pub struct ApiEventMetricsBucketIdentifier { + #[serde(rename = "time_range")] + pub time_bucket: TimeRange, + // Coz FE sucks + #[serde(rename = "time_bucket")] + #[serde(with = "common_utils::custom_serde::iso8601custom")] + pub start_time: time::PrimitiveDateTime, +} + +impl ApiEventMetricsBucketIdentifier { + pub fn new(normalized_time_range: TimeRange) -> Self { + Self { + time_bucket: normalized_time_range, + start_time: normalized_time_range.start_time, + } + } +} + +impl Hash for ApiEventMetricsBucketIdentifier { + fn hash(&self, state: &mut H) { + self.time_bucket.hash(state); + } +} + +impl PartialEq for ApiEventMetricsBucketIdentifier { + fn eq(&self, other: &Self) -> bool { + let mut left = DefaultHasher::new(); + self.hash(&mut left); + let mut right = DefaultHasher::new(); + other.hash(&mut right); + left.finish() == right.finish() + } +} + +#[derive(Debug, serde::Serialize)] +pub struct ApiEventMetricsBucketValue { + pub latency: Option, + pub api_count: Option, + pub status_code_count: Option, +} + +#[derive(Debug, serde::Serialize)] +pub struct ApiMetricsBucketResponse { + #[serde(flatten)] + pub values: ApiEventMetricsBucketValue, + #[serde(flatten)] + pub dimensions: ApiEventMetricsBucketIdentifier, +} diff --git a/crates/api_models/src/analytics/payments.rs b/crates/api_models/src/analytics/payments.rs index b5e5852d6283..2d7ae262f489 100644 --- a/crates/api_models/src/analytics/payments.rs +++ b/crates/api_models/src/analytics/payments.rs @@ -3,13 +3,12 @@ use std::{ hash::{Hash, Hasher}, }; -use common_enums::enums::{AttemptStatus, AuthenticationType, Currency, PaymentMethod}; -use common_utils::events::ApiEventMetric; - use super::{NameDescription, TimeRange}; -use crate::{analytics::MetricsResponse, enums::Connector}; +use crate::enums::{ + AttemptStatus, AuthenticationType, Connector, Currency, PaymentMethod, PaymentMethodType, +}; -#[derive(Clone, Debug, Default, serde::Deserialize, masking::Serialize)] +#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)] pub struct PaymentFilters { #[serde(default)] pub currency: Vec, @@ -21,6 +20,8 @@ pub struct PaymentFilters { pub auth_type: Vec, #[serde(default)] pub payment_method: Vec, + #[serde(default)] + pub payment_method_type: Vec, } #[derive( @@ -44,6 +45,7 @@ pub enum PaymentDimensions { // Consult the Dashboard FE folks since these also affects the order of metrics on FE Connector, PaymentMethod, + PaymentMethodType, Currency, #[strum(serialize = "authentication_type")] #[serde(rename = "authentication_type")] @@ -73,6 +75,35 @@ pub enum PaymentMetrics { PaymentSuccessCount, PaymentProcessedAmount, AvgTicketSize, + RetriesCount, + ConnectorSuccessRate, +} + +#[derive(Debug, Default, serde::Serialize)] +pub struct ErrorResult { + pub reason: String, + pub count: i64, + pub percentage: f64, +} + +#[derive( + Clone, + Copy, + Debug, + Hash, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumIter, + strum::AsRefStr, +)] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] +pub enum PaymentDistributions { + #[strum(serialize = "error_message")] + PaymentErrorMessage, } pub mod metric_behaviour { @@ -109,6 +140,7 @@ pub struct PaymentMetricsBucketIdentifier { #[serde(rename = "authentication_type")] pub auth_type: Option, pub payment_method: Option, + pub payment_method_type: Option, #[serde(rename = "time_range")] pub time_bucket: TimeRange, // Coz FE sucks @@ -124,6 +156,7 @@ impl PaymentMetricsBucketIdentifier { 
connector: Option, auth_type: Option, payment_method: Option, + payment_method_type: Option, normalized_time_range: TimeRange, ) -> Self { Self { @@ -132,6 +165,7 @@ impl PaymentMetricsBucketIdentifier { connector, auth_type, payment_method, + payment_method_type, time_bucket: normalized_time_range, start_time: normalized_time_range.start_time, } @@ -145,6 +179,7 @@ impl Hash for PaymentMetricsBucketIdentifier { self.connector.hash(state); self.auth_type.map(|i| i.to_string()).hash(state); self.payment_method.hash(state); + self.payment_method_type.hash(state); self.time_bucket.hash(state); } } @@ -166,6 +201,10 @@ pub struct PaymentMetricsBucketValue { pub payment_success_count: Option, pub payment_processed_amount: Option, pub avg_ticket_size: Option, + pub payment_error_message: Option>, + pub retries_count: Option, + pub retries_amount_processed: Option, + pub connector_success_rate: Option, } #[derive(Debug, serde::Serialize)] @@ -175,6 +214,3 @@ pub struct MetricsBucketResponse { #[serde(flatten)] pub dimensions: PaymentMetricsBucketIdentifier, } - -impl ApiEventMetric for MetricsBucketResponse {} -impl ApiEventMetric for MetricsResponse {} diff --git a/crates/api_models/src/analytics/refunds.rs b/crates/api_models/src/analytics/refunds.rs index c5d444338d38..5ecdf1cecb3f 100644 --- a/crates/api_models/src/analytics/refunds.rs +++ b/crates/api_models/src/analytics/refunds.rs @@ -3,10 +3,7 @@ use std::{ hash::{Hash, Hasher}, }; -use common_enums::enums::{Currency, RefundStatus}; -use common_utils::events::ApiEventMetric; - -use crate::analytics::MetricsResponse; +use crate::{enums::Currency, refunds::RefundStatus}; #[derive( Clone, @@ -20,7 +17,7 @@ use crate::analytics::MetricsResponse; strum::Display, strum::EnumString, )] -// TODO RefundType common_enums need to mapped to storage_model +// TODO RefundType api_models_oss need to mapped to storage_model #[serde(rename_all = "snake_case")] #[strum(serialize_all = "snake_case")] pub enum RefundType { @@ -31,7 +28,7 @@ pub enum RefundType { } use super::{NameDescription, TimeRange}; -#[derive(Clone, Debug, Default, serde::Deserialize, masking::Serialize)] +#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)] pub struct RefundFilters { #[serde(default)] pub currency: Vec, @@ -115,8 +112,9 @@ impl From for NameDescription { #[derive(Debug, serde::Serialize, Eq)] pub struct RefundMetricsBucketIdentifier { pub currency: Option, - pub refund_status: Option, + pub refund_status: Option, pub connector: Option, + pub refund_type: Option, #[serde(rename = "time_range")] pub time_bucket: TimeRange, @@ -128,7 +126,7 @@ pub struct RefundMetricsBucketIdentifier { impl Hash for RefundMetricsBucketIdentifier { fn hash(&self, state: &mut H) { self.currency.hash(state); - self.refund_status.map(|i| i.to_string()).hash(state); + self.refund_status.hash(state); self.connector.hash(state); self.refund_type.hash(state); self.time_bucket.hash(state); @@ -147,7 +145,7 @@ impl PartialEq for RefundMetricsBucketIdentifier { impl RefundMetricsBucketIdentifier { pub fn new( currency: Option, - refund_status: Option, + refund_status: Option, connector: Option, refund_type: Option, normalized_time_range: TimeRange, @@ -162,7 +160,6 @@ impl RefundMetricsBucketIdentifier { } } } - #[derive(Debug, serde::Serialize)] pub struct RefundMetricsBucketValue { pub refund_success_rate: Option, @@ -170,7 +167,6 @@ pub struct RefundMetricsBucketValue { pub refund_success_count: Option, pub refund_processed_amount: Option, } - #[derive(Debug, serde::Serialize)] 
 pub struct RefundMetricsBucketResponse {
     #[serde(flatten)]
@@ -178,6 +174,3 @@ pub struct RefundMetricsBucketResponse {
     #[serde(flatten)]
     pub dimensions: RefundMetricsBucketIdentifier,
 }
-
-impl ApiEventMetric for RefundMetricsBucketResponse {}
-impl ApiEventMetric for MetricsResponse<RefundMetricsBucketResponse> {}
diff --git a/crates/api_models/src/analytics/sdk_events.rs b/crates/api_models/src/analytics/sdk_events.rs
new file mode 100644
index 000000000000..76ccb29867f2
--- /dev/null
+++ b/crates/api_models/src/analytics/sdk_events.rs
@@ -0,0 +1,215 @@
+use std::{
+    collections::hash_map::DefaultHasher,
+    hash::{Hash, Hasher},
+};
+
+use super::{NameDescription, TimeRange};
+
+#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SdkEventsRequest {
+    pub payment_id: String,
+    pub time_range: TimeRange,
+}
+
+#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
+pub struct SdkEventFilters {
+    #[serde(default)]
+    pub payment_method: Vec<String>,
+    #[serde(default)]
+    pub platform: Vec<String>,
+    #[serde(default)]
+    pub browser_name: Vec<String>,
+    #[serde(default)]
+    pub source: Vec<String>,
+    #[serde(default)]
+    pub component: Vec<String>,
+    #[serde(default)]
+    pub payment_experience: Vec<String>,
+}
+
+#[derive(
+    Debug,
+    serde::Serialize,
+    serde::Deserialize,
+    strum::AsRefStr,
+    PartialEq,
+    PartialOrd,
+    Eq,
+    Ord,
+    strum::Display,
+    strum::EnumIter,
+    Clone,
+    Copy,
+)]
+#[serde(rename_all = "snake_case")]
+#[strum(serialize_all = "snake_case")]
+pub enum SdkEventDimensions {
+    // Do not change the order of these enums
+    // Consult the Dashboard FE folks since these also affect the order of metrics on FE
+    PaymentMethod,
+    Platform,
+    BrowserName,
+    Source,
+    Component,
+    PaymentExperience,
+}
+
+#[derive(
+    Clone,
+    Debug,
+    Hash,
+    PartialEq,
+    Eq,
+    serde::Serialize,
+    serde::Deserialize,
+    strum::Display,
+    strum::EnumIter,
+    strum::AsRefStr,
+)]
+#[strum(serialize_all = "snake_case")]
+#[serde(rename_all = "snake_case")]
+pub enum SdkEventMetrics {
+    PaymentAttempts,
+    PaymentSuccessCount,
+    PaymentMethodsCallCount,
+    SdkRenderedCount,
+    SdkInitiatedCount,
+    PaymentMethodSelectedCount,
+    PaymentDataFilledCount,
+    AveragePaymentTime,
+}
+
+#[derive(
+    Clone,
+    Debug,
+    Hash,
+    PartialEq,
+    Eq,
+    serde::Serialize,
+    serde::Deserialize,
+    strum::Display,
+    strum::EnumIter,
+    strum::AsRefStr,
+)]
+#[strum(serialize_all = "SCREAMING_SNAKE_CASE")]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub enum SdkEventNames {
+    StripeElementsCalled,
+    AppRendered,
+    PaymentMethodChanged,
+    PaymentDataFilled,
+    PaymentAttempt,
+    PaymentSuccess,
+    PaymentMethodsCall,
+    ConfirmCall,
+    SessionsCall,
+    CustomerPaymentMethodsCall,
+    RedirectingUser,
+    DisplayBankTransferInfoPage,
+    DisplayQrCodeInfoPage,
+}
+
+pub mod metric_behaviour {
+    pub struct PaymentAttempts;
+    pub struct PaymentSuccessCount;
+    pub struct PaymentMethodsCallCount;
+    pub struct SdkRenderedCount;
+    pub struct SdkInitiatedCount;
+    pub struct PaymentMethodSelectedCount;
+    pub struct PaymentDataFilledCount;
+    pub struct AveragePaymentTime;
+}
+
+impl From<SdkEventMetrics> for NameDescription {
+    fn from(value: SdkEventMetrics) -> Self {
+        Self {
+            name: value.to_string(),
+            desc: String::new(),
+        }
+    }
+}
+
+impl From<SdkEventDimensions> for NameDescription {
+    fn from(value: SdkEventDimensions) -> Self {
+        Self {
+            name: value.to_string(),
+            desc: String::new(),
+        }
+    }
+}
+
+#[derive(Debug, serde::Serialize, Eq)]
+pub struct SdkEventMetricsBucketIdentifier {
+    pub payment_method: Option<String>,
+    pub platform: Option<String>,
+    pub browser_name: Option<String>,
+    pub source: Option<String>,
+    pub component: Option<String>,
+    pub payment_experience: Option<String>,
+    pub time_bucket: Option<String>,
+}
+
+impl SdkEventMetricsBucketIdentifier {
+    pub fn new(
+        payment_method: Option<String>,
+        platform: Option<String>,
+        browser_name: Option<String>,
+        source: Option<String>,
+        component: Option<String>,
+        payment_experience: Option<String>,
+        time_bucket: Option<String>,
+    ) -> Self {
+        Self {
+            payment_method,
+            platform,
+            browser_name,
+            source,
+            component,
+            payment_experience,
+            time_bucket,
+        }
+    }
+}
+
+impl Hash for SdkEventMetricsBucketIdentifier {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.payment_method.hash(state);
+        self.platform.hash(state);
+        self.browser_name.hash(state);
+        self.source.hash(state);
+        self.component.hash(state);
+        self.payment_experience.hash(state);
+        self.time_bucket.hash(state);
+    }
+}
+
+impl PartialEq for SdkEventMetricsBucketIdentifier {
+    fn eq(&self, other: &Self) -> bool {
+        let mut left = DefaultHasher::new();
+        self.hash(&mut left);
+        let mut right = DefaultHasher::new();
+        other.hash(&mut right);
+        left.finish() == right.finish()
+    }
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct SdkEventMetricsBucketValue {
+    pub payment_attempts: Option<u64>,
+    pub payment_success_count: Option<u64>,
+    pub payment_methods_call_count: Option<u64>,
+    pub average_payment_time: Option<f64>,
+    pub sdk_rendered_count: Option<u64>,
+    pub sdk_initiated_count: Option<u64>,
+    pub payment_method_selected_count: Option<u64>,
+    pub payment_data_filled_count: Option<u64>,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct MetricsBucketResponse {
+    #[serde(flatten)]
+    pub values: SdkEventMetricsBucketValue,
+    #[serde(flatten)]
+    pub dimensions: SdkEventMetricsBucketIdentifier,
+}
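The `PartialEq` impl above deliberately defines equality as "hashes to the same value under `DefaultHasher`", so equality and hashing can never disagree when bucket identifiers are used as map keys; the trade-off is that a hash collision would make two distinct buckets compare equal, which is acceptable for analytics bucketing. A minimal self-contained sketch of the same pattern (the `Bucket` type is illustrative, not part of this diff):

    use std::{
        collections::hash_map::DefaultHasher,
        hash::{Hash, Hasher},
    };

    #[derive(Debug, Eq)]
    struct Bucket {
        platform: Option<String>,
        time_bucket: Option<String>,
    }

    impl Hash for Bucket {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.platform.hash(state);
            self.time_bucket.hash(state);
        }
    }

    // Equality is defined through the hash, so `a == b` implies equal hashes by construction.
    impl PartialEq for Bucket {
        fn eq(&self, other: &Self) -> bool {
            let mut left = DefaultHasher::new();
            let mut right = DefaultHasher::new();
            self.hash(&mut left);
            other.hash(&mut right);
            left.finish() == right.finish()
        }
    }

    fn main() {
        let a = Bucket { platform: Some("web".into()), time_bucket: None };
        let b = Bucket { platform: Some("web".into()), time_bucket: None };
        assert_eq!(a, b);
    }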
diff --git a/crates/api_models/src/events.rs b/crates/api_models/src/events.rs
index 782c02be7a3a..ac7cdeb83d94 100644
--- a/crates/api_models/src/events.rs
+++ b/crates/api_models/src/events.rs
@@ -7,6 +7,7 @@ pub mod payouts;
 pub mod refund;
 pub mod routing;
 pub mod user;
+pub mod user_role;
 
 use common_utils::{
     events::{ApiEventMetric, ApiEventsType},
@@ -14,8 +15,16 @@ use common_utils::{
 };
 
 use crate::{
-    admin::*, api_keys::*, cards_info::*, disputes::*, files::*, mandates::*, payment_methods::*,
-    payments::*, verifications::*,
+    admin::*,
+    analytics::{api_event::*, sdk_events::*, *},
+    api_keys::*,
+    cards_info::*,
+    disputes::*,
+    files::*,
+    mandates::*,
+    payment_methods::*,
+    payments::*,
+    verifications::*,
 };
 
 impl ApiEventMetric for TimeRange {}
@@ -63,7 +72,23 @@ impl_misc_api_event_type!(
     ApplepayMerchantVerificationRequest,
     ApplepayMerchantResponse,
     ApplepayVerifiedDomainsResponse,
-    UpdateApiKeyRequest
+    UpdateApiKeyRequest,
+    GetApiEventFiltersRequest,
+    ApiEventFiltersResponse,
+    GetInfoResponse,
+    GetPaymentMetricRequest,
+    GetRefundMetricRequest,
+    GetSdkEventMetricRequest,
+    GetPaymentFiltersRequest,
+    PaymentFiltersResponse,
+    GetRefundFilterRequest,
+    RefundFiltersResponse,
+    GetSdkEventFiltersRequest,
+    SdkEventFiltersResponse,
+    ApiLogsRequest,
+    GetApiEventMetricRequest,
+    SdkEventsRequest,
+    ReportRequest
 );
 
 #[cfg(feature = "stripe")]
@@ -76,3 +101,9 @@ impl_misc_api_event_type!(
     CustomerPaymentMethodListResponse,
     CreateCustomerResponse
 );
+
+impl<T> ApiEventMetric for MetricsResponse<T> {
+    fn get_api_event_type(&self) -> Option<ApiEventsType> {
+        Some(ApiEventsType::Miscellaneous)
+    }
+}
diff --git a/crates/api_models/src/events/user.rs b/crates/api_models/src/events/user.rs
index 4e9f2f284173..8b7cd02c9350 100644
--- a/crates/api_models/src/events/user.rs
+++ b/crates/api_models/src/events/user.rs
@@ -1,6 +1,14 @@
 use common_utils::events::{ApiEventMetric, ApiEventsType};
 
-use crate::user::{ChangePasswordRequest, ConnectAccountRequest, ConnectAccountResponse};
+#[cfg(feature = "dummy_connector")]
+use crate::user::sample_data::SampleDataRequest;
+use crate::user::{
+    dashboard_metadata::{
+        GetMetaDataRequest, GetMetaDataResponse, GetMultipleMetaDataPayload, SetMetaDataRequest,
+    },
+    ChangePasswordRequest, ConnectAccountRequest, ConnectAccountResponse,
+    CreateInternalUserRequest, GetUsersResponse, SwitchMerchantIdRequest, UserMerchantCreate,
+};
 
 impl ApiEventMetric for ConnectAccountResponse {
     fn get_api_event_type(&self) -> Option<ApiEventsType> {
@@ -13,4 +21,17 @@ impl ApiEventMetric for ConnectAccountResponse {
 
 impl ApiEventMetric for ConnectAccountRequest {}
 
-common_utils::impl_misc_api_event_type!(ChangePasswordRequest);
+common_utils::impl_misc_api_event_type!(
+    ChangePasswordRequest,
+    GetMultipleMetaDataPayload,
+    GetMetaDataResponse,
+    GetMetaDataRequest,
+    SetMetaDataRequest,
+    SwitchMerchantIdRequest,
+    CreateInternalUserRequest,
+    UserMerchantCreate,
+    GetUsersResponse
+);
+
+#[cfg(feature = "dummy_connector")]
+common_utils::impl_misc_api_event_type!(SampleDataRequest);
diff --git a/crates/api_models/src/events/user_role.rs b/crates/api_models/src/events/user_role.rs
new file mode 100644
index 000000000000..aa8d13dab6df
--- /dev/null
+++ b/crates/api_models/src/events/user_role.rs
@@ -0,0 +1,14 @@
+use common_utils::events::{ApiEventMetric, ApiEventsType};
+
+use crate::user_role::{
+    AuthorizationInfoResponse, GetRoleRequest, ListRolesResponse, RoleInfoResponse,
+    UpdateUserRoleRequest,
+};
+
+common_utils::impl_misc_api_event_type!(
+    ListRolesResponse,
+    RoleInfoResponse,
+    GetRoleRequest,
+    AuthorizationInfoResponse,
+    UpdateUserRoleRequest
+);
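All of the analytics and user-role request/response types are wired into API event logging through `common_utils::impl_misc_api_event_type!`. Judging by the hand-written `MetricsResponse` impl above, the macro presumably expands to the same trait impl for each listed type, roughly like this (a sketch inferred from the pattern in this diff, not a verbatim copy of the macro):

    // Assumed shape of the macro: one `ApiEventMetric` impl per listed type,
    // each reporting the `Miscellaneous` event type. Paths are elided for brevity.
    macro_rules! impl_misc_api_event_type {
        ($($type:ty),+) => {
            $(
                impl ApiEventMetric for $type {
                    fn get_api_event_type(&self) -> Option<ApiEventsType> {
                        Some(ApiEventsType::Miscellaneous)
                    }
                }
            )+
        };
    }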
diff --git a/crates/api_models/src/lib.rs b/crates/api_models/src/lib.rs
index ab40a96582bb..056888839a54 100644
--- a/crates/api_models/src/lib.rs
+++ b/crates/api_models/src/lib.rs
@@ -26,5 +26,7 @@ pub mod refunds;
 pub mod routing;
 pub mod surcharge_decision_configs;
 pub mod user;
+pub mod user_role;
 pub mod verifications;
+pub mod verify_connector;
 pub mod webhooks;
diff --git a/crates/api_models/src/payments.rs b/crates/api_models/src/payments.rs
index acb9bbdd6cd4..49f2781a18a0 100644
--- a/crates/api_models/src/payments.rs
+++ b/crates/api_models/src/payments.rs
@@ -204,8 +204,9 @@ pub struct PaymentsRequest {
     #[schema(example = "187282ab-40ef-47a9-9206-5099ba31e432")]
     pub payment_token: Option<String>,
 
-    /// This is used when payment is to be confirmed and the card is not saved
-    #[schema(value_type = Option<String>)]
+    /// This is used when payment is to be confirmed and the card is not saved.
+    /// This field will be deprecated soon; use the CardToken object instead
+    #[schema(value_type = Option<String>, deprecated)]
     pub card_cvc: Option<Secret<String>>,
 
     /// The shipping address for the payment
@@ -310,6 +311,9 @@ pub struct PaymentsRequest {
     /// The type of the payment that differentiates between normal and various types of mandate payments
     #[schema(value_type = Option<PaymentType>)]
     pub payment_type: Option<api_enums::PaymentType>,
+
+    /// Request for an incremental authorization
+    pub request_incremental_authorization: Option<bool>,
 }
 
 impl PaymentsRequest {
@@ -717,12 +721,16 @@ pub struct Card {
     pub nick_name: Option<Secret<String>>,
 }
 
-#[derive(Eq, PartialEq, Debug, serde::Deserialize, serde::Serialize, Clone, ToSchema)]
+#[derive(Eq, PartialEq, Debug, serde::Deserialize, serde::Serialize, Clone, ToSchema, Default)]
 #[serde(rename_all = "snake_case")]
 pub struct CardToken {
     /// The card holder's name
     #[schema(value_type = String, example = "John Test")]
     pub card_holder_name: Option<Secret<String>>,
+
+    /// The CVC number for the card
+    #[schema(value_type = Option<String>)]
+    pub card_cvc: Option<Secret<String>>,
 }
 
 #[derive(Eq, PartialEq, Clone, Debug, serde::Deserialize, serde::Serialize, ToSchema)]
@@ -1204,10 +1212,10 @@ pub enum BankRedirectData {
     OpenBankingUk {
         // Issuer banks
         #[schema(value_type = BankNames)]
-        issuer: api_enums::BankNames,
+        issuer: Option<api_enums::BankNames>,
         /// The country for bank payment
         #[schema(value_type = CountryAlpha2, example = "US")]
-        country: api_enums::CountryAlpha2,
+        country: Option<api_enums::CountryAlpha2>,
     },
     Przelewy24 {
         //Issuer banks
@@ -2210,6 +2218,9 @@ pub struct PaymentsResponse {
 
     /// Identifier of the connector ( merchant connector account ) which was chosen to make the payment
     pub merchant_connector_id: Option<String>,
+
+    /// If true, incremental authorization can be performed on this payment
+    pub incremental_authorization_allowed: Option<bool>,
 }
 
 #[derive(Clone, Debug, serde::Deserialize, ToSchema, serde::Serialize)]
@@ -2339,9 +2350,11 @@ pub struct PaymentListFilters {
 pub struct TimeRange {
     /// The start time to filter payments list or to get list of filters. To get list of filters start time is needed to be passed
     #[serde(with = "common_utils::custom_serde::iso8601")]
+    #[serde(alias = "startTime")]
     pub start_time: PrimitiveDateTime,
     /// The end time to filter payments list or to get list of filters. If not passed the default time is now
     #[serde(default, with = "common_utils::custom_serde::iso8601::option")]
+    #[serde(alias = "endTime")]
     pub end_time: Option<PrimitiveDateTime>,
 }
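The new `#[serde(alias = ...)]` attributes let `TimeRange` accept both the snake_case field names and the camelCase spellings; serde aliases affect deserialization only, so responses keep serializing in snake_case. A simplified stand-in (plain `String` fields in place of the real iso8601 (de)serializers):

    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    struct TimeRange {
        #[serde(alias = "startTime")]
        start_time: String,
        #[serde(default, alias = "endTime")]
        end_time: Option<String>,
    }

    fn main() {
        // Both spellings now deserialize into the same field.
        let snake: TimeRange =
            serde_json::from_str(r#"{"start_time": "2023-11-01T00:00:00Z"}"#).unwrap();
        let camel: TimeRange =
            serde_json::from_str(r#"{"startTime": "2023-11-01T00:00:00Z"}"#).unwrap();
        assert_eq!(snake.start_time, camel.start_time);
        assert!(snake.end_time.is_none());
    }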
diff --git a/crates/api_models/src/user.rs b/crates/api_models/src/user.rs
index 41ea9cc5193a..36d730f5118e 100644
--- a/crates/api_models/src/user.rs
+++ b/crates/api_models/src/user.rs
@@ -1,6 +1,11 @@
 use common_utils::pii;
 use masking::Secret;
 
+use crate::user_role::UserStatus;
+pub mod dashboard_metadata;
+#[cfg(feature = "dummy_connector")]
+pub mod sample_data;
+
 #[derive(serde::Deserialize, Debug, Clone, serde::Serialize)]
 pub struct ConnectAccountRequest {
     pub email: pii::Email,
@@ -25,3 +30,35 @@ pub struct ChangePasswordRequest {
     pub new_password: Secret<String>,
     pub old_password: Secret<String>,
 }
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct SwitchMerchantIdRequest {
+    pub merchant_id: String,
+}
+
+#[derive(serde::Deserialize, Debug, serde::Serialize)]
+pub struct CreateInternalUserRequest {
+    pub name: Secret<String>,
+    pub email: pii::Email,
+    pub password: Secret<String>,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct UserMerchantCreate {
+    pub company_name: String,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct GetUsersResponse(pub Vec<UserDetails>);
+
+#[derive(Debug, serde::Serialize)]
+pub struct UserDetails {
+    pub user_id: String,
+    pub email: pii::Email,
+    pub name: Secret<String>,
+    pub role_id: String,
+    pub role_name: String,
+    pub status: UserStatus,
+    #[serde(with = "common_utils::custom_serde::iso8601")]
+    pub last_modified_at: time::PrimitiveDateTime,
+}
diff --git a/crates/api_models/src/user/dashboard_metadata.rs b/crates/api_models/src/user/dashboard_metadata.rs
new file mode 100644
index 000000000000..04cda3bd7075
--- /dev/null
+++ b/crates/api_models/src/user/dashboard_metadata.rs
@@ -0,0 +1,110 @@
+use masking::Secret;
+use strum::EnumString;
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub enum SetMetaDataRequest {
+    ProductionAgreement(ProductionAgreementRequest),
+    SetupProcessor(SetupProcessor),
+    ConfigureEndpoint,
+    SetupComplete,
+    FirstProcessorConnected(ProcessorConnected),
+    SecondProcessorConnected(ProcessorConnected),
+    ConfiguredRouting(ConfiguredRouting),
+    TestPayment(TestPayment),
+    IntegrationMethod(IntegrationMethod),
+    IntegrationCompleted,
+    SPRoutingConfigured(ConfiguredRouting),
+    SPTestPayment,
+    DownloadWoocom,
+    ConfigureWoocom,
+    SetupWoocomWebhook,
+    IsMultipleConfiguration,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct ProductionAgreementRequest {
+    pub version: String,
+    #[serde(skip_deserializing)]
+    pub ip_address: Option<Secret<String>>,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct SetupProcessor {
+    pub connector_id: String,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct ProcessorConnected {
+    pub processor_id: String,
+    pub processor_name: String,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct ConfiguredRouting {
+    pub routing_id: String,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct TestPayment {
+    pub payment_id: String,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct IntegrationMethod {
+    pub integration_type: String,
+}
+
+#[derive(Debug, serde::Deserialize, EnumString, serde::Serialize)]
+pub enum GetMetaDataRequest {
+    ProductionAgreement,
+    SetupProcessor,
+    ConfigureEndpoint,
+    SetupComplete,
+    FirstProcessorConnected,
+    SecondProcessorConnected,
+    ConfiguredRouting,
+    TestPayment,
+    IntegrationMethod,
+    IntegrationCompleted,
+    StripeConnected,
+    PaypalConnected,
+    SPRoutingConfigured,
+    SPTestPayment,
+    DownloadWoocom,
+    ConfigureWoocom,
+    SetupWoocomWebhook,
+    IsMultipleConfiguration,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+#[serde(transparent)]
+pub struct GetMultipleMetaDataPayload {
+    pub results: Vec<GetMetaDataRequest>,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct GetMultipleMetaDataRequest {
+    pub keys: String,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub enum GetMetaDataResponse {
+    ProductionAgreement(bool),
+    SetupProcessor(Option<SetupProcessor>),
+    ConfigureEndpoint(bool),
+    SetupComplete(bool),
+    FirstProcessorConnected(Option<ProcessorConnected>),
+    SecondProcessorConnected(Option<ProcessorConnected>),
+    ConfiguredRouting(Option<ConfiguredRouting>),
+    TestPayment(Option<TestPayment>),
+    IntegrationMethod(Option<IntegrationMethod>),
+    IntegrationCompleted(bool),
+    StripeConnected(Option<ProcessorConnected>),
+    PaypalConnected(Option<ProcessorConnected>),
+    SPRoutingConfigured(Option<ConfiguredRouting>),
+    SPTestPayment(bool),
+    DownloadWoocom(bool),
+    ConfigureWoocom(bool),
+    SetupWoocomWebhook(bool),
+    IsMultipleConfiguration(bool),
+}
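`GetMetaDataRequest` derives `strum::EnumString`, which is what makes the comma-separated `keys` string in `GetMultipleMetaDataRequest` parseable into typed enum values. A plausible sketch of that parsing step (the `parse_keys` helper is hypothetical, not part of the diff):

    use strum::EnumString;

    // Two variants stand in for the full enum above.
    #[derive(Debug, EnumString, PartialEq)]
    enum GetMetaDataRequest {
        SetupComplete,
        TestPayment,
    }

    // Hypothetical helper: split the `keys` query string and parse each token.
    fn parse_keys(keys: &str) -> Result<Vec<GetMetaDataRequest>, strum::ParseError> {
        keys.split(',').map(|key| key.trim().parse()).collect()
    }

    fn main() {
        let parsed = parse_keys("SetupComplete, TestPayment").unwrap();
        assert_eq!(
            parsed,
            vec![GetMetaDataRequest::SetupComplete, GetMetaDataRequest::TestPayment]
        );
    }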
diff --git a/crates/api_models/src/user/sample_data.rs b/crates/api_models/src/user/sample_data.rs
new file mode 100644
index 000000000000..6d20b20f369c
--- /dev/null
+++ b/crates/api_models/src/user/sample_data.rs
@@ -0,0 +1,23 @@
+use common_enums::{AuthenticationType, CountryAlpha2};
+use common_utils::{self};
+use time::PrimitiveDateTime;
+
+use crate::enums::Connector;
+
+#[derive(serde::Deserialize, Debug, serde::Serialize)]
+pub struct SampleDataRequest {
+    pub record: Option<usize>,
+    pub connector: Option<Vec<Connector>>,
+    #[serde(default, with = "common_utils::custom_serde::iso8601::option")]
+    pub start_time: Option<PrimitiveDateTime>,
+    #[serde(default, with = "common_utils::custom_serde::iso8601::option")]
+    pub end_time: Option<PrimitiveDateTime>,
+    // The amount for each sample will be between min_amount and max_amount (in dollars)
+    pub min_amount: Option<i64>,
+    pub max_amount: Option<i64>,
+    pub currency: Option<Vec<common_enums::Currency>>,
+    pub auth_type: Option<Vec<AuthenticationType>>,
+    pub business_country: Option<CountryAlpha2>,
+    pub business_label: Option<String>,
+    pub profile_id: Option<String>,
+}
diff --git a/crates/api_models/src/user_role.rs b/crates/api_models/src/user_role.rs
new file mode 100644
index 000000000000..735cd240b6e7
--- /dev/null
+++ b/crates/api_models/src/user_role.rs
@@ -0,0 +1,88 @@
+#[derive(Debug, serde::Serialize)]
+pub struct ListRolesResponse(pub Vec<RoleInfoResponse>);
+
+#[derive(Debug, serde::Serialize)]
+pub struct RoleInfoResponse {
+    pub role_id: &'static str,
+    pub permissions: Vec<Permission>,
+    pub role_name: &'static str,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct GetRoleRequest {
+    pub role_id: String,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub enum Permission {
+    PaymentRead,
+    PaymentWrite,
+    RefundRead,
+    RefundWrite,
+    ApiKeyRead,
+    ApiKeyWrite,
+    MerchantAccountRead,
+    MerchantAccountWrite,
+    MerchantConnectorAccountRead,
+    MerchantConnectorAccountWrite,
+    ForexRead,
+    RoutingRead,
+    RoutingWrite,
+    DisputeRead,
+    DisputeWrite,
+    MandateRead,
+    MandateWrite,
+    FileRead,
+    FileWrite,
+    Analytics,
+    ThreeDsDecisionManagerWrite,
+    ThreeDsDecisionManagerRead,
+    SurchargeDecisionManagerWrite,
+    SurchargeDecisionManagerRead,
+    UsersRead,
+    UsersWrite,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub enum PermissionModule {
+    Payments,
+    Refunds,
+    MerchantAccount,
+    Forex,
+    Connectors,
+    Routing,
+    Analytics,
+    Mandates,
+    Disputes,
+    Files,
+    ThreeDsDecisionManager,
+    SurchargeDecisionManager,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct AuthorizationInfoResponse(pub Vec<ModuleInfo>);
+
+#[derive(Debug, serde::Serialize)]
+pub struct ModuleInfo {
+    pub module: PermissionModule,
+    pub description: &'static str,
+    pub permissions: Vec<PermissionInfo>,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct PermissionInfo {
+    pub enum_name: Permission,
+    pub description: &'static str,
+}
+
+#[derive(Debug, serde::Deserialize, serde::Serialize)]
+pub struct UpdateUserRoleRequest {
+    pub user_id: String,
+    pub role_id: String,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub enum UserStatus {
+    Active,
+    InvitationSent,
+}
diff --git a/crates/api_models/src/verify_connector.rs b/crates/api_models/src/verify_connector.rs
new file mode 100644
index 000000000000..1db5a19a030a
--- /dev/null
+++ b/crates/api_models/src/verify_connector.rs
@@ -0,0 +1,11 @@
+use common_utils::events::{ApiEventMetric, ApiEventsType};
+
+use crate::{admin, enums};
+
+#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
+pub struct VerifyConnectorRequest {
+    pub connector_name: enums::Connector,
+    pub connector_account_details: admin::ConnectorAuthType,
+}
+
+common_utils::impl_misc_api_event_type!(VerifyConnectorRequest);
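`RoleInfoResponse` and the authorization-info types use `&'static str` fields, which suggests roles and permission descriptions are defined as static tables on the server side. One way such a table could be assembled with the types from `user_role.rs` above (the module and description strings here are invented purely for illustration; the real role tables live elsewhere in the codebase):

    // Hypothetical static wiring, using ModuleInfo / PermissionInfo / Permission
    // / PermissionModule exactly as declared in the new user_role.rs.
    fn payments_module_info() -> ModuleInfo {
        ModuleInfo {
            module: PermissionModule::Payments,
            description: "Create, view and update payments",
            permissions: vec![
                PermissionInfo {
                    enum_name: Permission::PaymentRead,
                    description: "View payments",
                },
                PermissionInfo {
                    enum_name: Permission::PaymentWrite,
                    description: "Create and update payments",
                },
            ],
        }
    }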
diff --git a/crates/common_enums/Cargo.toml b/crates/common_enums/Cargo.toml
index cd061970bff3..72d9f6bb0bb1 100644
--- a/crates/common_enums/Cargo.toml
+++ b/crates/common_enums/Cargo.toml
@@ -8,7 +8,6 @@ readme = "README.md"
 license.workspace = true
 
 [features]
-default = ["dummy_connector"]
 dummy_connector = []
 
 [dependencies]
diff --git a/crates/common_enums/src/enums.rs b/crates/common_enums/src/enums.rs
index 3f343965130e..8da4a2da54cc 100644
--- a/crates/common_enums/src/enums.rs
+++ b/crates/common_enums/src/enums.rs
@@ -12,6 +12,7 @@ pub mod diesel_exports {
     DbFutureUsage as FutureUsage, DbIntentStatus as IntentStatus,
     DbMandateStatus as MandateStatus, DbPaymentMethodIssuerCode as PaymentMethodIssuerCode,
     DbPaymentType as PaymentType, DbRefundStatus as RefundStatus,
+    DbRequestIncrementalAuthorization as RequestIncrementalAuthorization,
 };
 }
@@ -1387,6 +1388,29 @@ pub enum CountryAlpha2 {
     US
 }
 
+#[derive(
+    Clone,
+    Debug,
+    Copy,
+    Default,
+    Eq,
+    Hash,
+    PartialEq,
+    serde::Deserialize,
+    serde::Serialize,
+    strum::Display,
+    strum::EnumString,
+)]
+#[router_derive::diesel_enum(storage_type = "db_enum")]
+#[serde(rename_all = "snake_case")]
+#[strum(serialize_all = "snake_case")]
+pub enum RequestIncrementalAuthorization {
+    True,
+    False,
+    #[default]
+    Default,
+}
+
 #[derive(Clone, Copy, Debug, Serialize, Deserialize)]
 #[rustfmt::skip]
 pub enum CountryAlpha3 {
diff --git a/crates/data_models/Cargo.toml b/crates/data_models/Cargo.toml
index 57ae1ec1ec87..857d53b6999e 100644
--- a/crates/data_models/Cargo.toml
+++ b/crates/data_models/Cargo.toml
@@ -18,7 +18,6 @@ common_enums = { version = "0.1.0", path = "../common_enums" }
 common_utils = { version = "0.1.0", path = "../common_utils" }
 masking = { version = "0.1.0", path = "../masking" }
 
-
 # Third party deps
 async-trait = "0.1.68"
 error-stack = "0.3.1"
diff --git a/crates/data_models/src/payments.rs b/crates/data_models/src/payments.rs
index 4e7a0923f6a9..af2076bfa10d 100644
--- a/crates/data_models/src/payments.rs
+++ b/crates/data_models/src/payments.rs
@@ -50,4 +50,6 @@ pub struct PaymentIntent {
     pub updated_by: String,
 
     pub surcharge_applicable: Option<bool>,
+    pub request_incremental_authorization: storage_enums::RequestIncrementalAuthorization,
+    pub incremental_authorization_allowed: Option<bool>,
 }
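`RequestIncrementalAuthorization` is a tri-state rather than a plain bool so that "merchant did not specify" stays distinguishable from an explicit `false`, with `Default` as the database default. A plausible mapping from the request-level `Option<bool>` added to `PaymentsRequest` earlier in this diff (the conversion function is illustrative; the router's actual glue code is not part of this diff):

    use common_enums::RequestIncrementalAuthorization;

    fn from_request_flag(flag: Option<bool>) -> RequestIncrementalAuthorization {
        match flag {
            Some(true) => RequestIncrementalAuthorization::True,
            Some(false) => RequestIncrementalAuthorization::False,
            // Absent in the request: fall back to the default behaviour.
            None => RequestIncrementalAuthorization::Default,
        }
    }

    fn main() {
        assert_eq!(
            from_request_flag(None),
            RequestIncrementalAuthorization::Default
        );
    }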
diff --git a/crates/data_models/src/payments/payment_intent.rs b/crates/data_models/src/payments/payment_intent.rs
index 2c5914f5b37f..d8f927a4e2c5 100644
--- a/crates/data_models/src/payments/payment_intent.rs
+++ b/crates/data_models/src/payments/payment_intent.rs
@@ -107,6 +107,8 @@ pub struct PaymentIntentNew {
     pub updated_by: String,
 
     pub surcharge_applicable: Option<bool>,
+    pub request_incremental_authorization: storage_enums::RequestIncrementalAuthorization,
+    pub incremental_authorization_allowed: Option<bool>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -116,6 +118,7 @@ pub enum PaymentIntentUpdate {
         amount_captured: Option<i64>,
         return_url: Option<String>,
         updated_by: String,
+        incremental_authorization_allowed: Option<bool>,
     },
     MetadataUpdate {
         metadata: pii::SecretSerdeValue,
@@ -137,6 +140,7 @@ pub enum PaymentIntentUpdate {
     },
     PGStatusUpdate {
         status: storage_enums::IntentStatus,
+        incremental_authorization_allowed: Option<bool>,
         updated_by: String,
     },
     Update {
@@ -213,6 +217,7 @@ pub struct PaymentIntentUpdateInternal {
     pub updated_by: String,
 
     pub surcharge_applicable: Option<bool>,
+    pub incremental_authorization_allowed: Option<bool>,
 }
 
 impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
@@ -283,10 +288,15 @@ impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
                 updated_by,
                 ..Default::default()
             },
-            PaymentIntentUpdate::PGStatusUpdate { status, updated_by } => Self {
+            PaymentIntentUpdate::PGStatusUpdate {
+                status,
+                updated_by,
+                incremental_authorization_allowed,
+            } => Self {
                 status: Some(status),
                 modified_at: Some(common_utils::date_time::now()),
                 updated_by,
+                incremental_authorization_allowed,
                 ..Default::default()
             },
             PaymentIntentUpdate::MerchantStatusUpdate {
@@ -310,6 +320,7 @@ impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
                 // customer_id,
                 return_url,
                 updated_by,
+                incremental_authorization_allowed,
             } => Self {
                 // amount,
                 // currency: Some(currency),
@@ -319,6 +330,7 @@ impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
                 return_url,
                 modified_at: Some(common_utils::date_time::now()),
                 updated_by,
+                incremental_authorization_allowed,
                 ..Default::default()
             },
             PaymentIntentUpdate::PaymentAttemptAndAttemptCountUpdate {
diff --git a/crates/diesel_models/src/business_profile.rs b/crates/diesel_models/src/business_profile.rs
index 1f6c4f604958..700104aaaecc 100644
--- a/crates/diesel_models/src/business_profile.rs
+++ b/crates/diesel_models/src/business_profile.rs
@@ -103,25 +103,39 @@ impl From<BusinessProfileNew> for BusinessProfile {
 
 impl BusinessProfileUpdateInternal {
     pub fn apply_changeset(self, source: BusinessProfile) -> BusinessProfile {
+        let Self {
+            profile_name,
+            modified_at: _,
+            return_url,
+            enable_payment_response_hash,
+            payment_response_hash_key,
+            redirect_to_merchant_with_http_post,
+            webhook_details,
+            metadata,
+            routing_algorithm,
+            intent_fulfillment_time,
+            frm_routing_algorithm,
+            payout_routing_algorithm,
+            is_recon_enabled,
+            applepay_verified_domains,
+        } = self;
         BusinessProfile {
-            profile_name: self.profile_name.unwrap_or(source.profile_name),
-            modified_at: self.modified_at.unwrap_or(source.modified_at),
-            return_url: self.return_url,
-            enable_payment_response_hash: self
-                .enable_payment_response_hash
+            profile_name: profile_name.unwrap_or(source.profile_name),
+            modified_at: common_utils::date_time::now(),
+            return_url,
+            enable_payment_response_hash: enable_payment_response_hash
                 .unwrap_or(source.enable_payment_response_hash),
-            payment_response_hash_key: self.payment_response_hash_key,
-            redirect_to_merchant_with_http_post: self
-                .redirect_to_merchant_with_http_post
+            payment_response_hash_key,
+            redirect_to_merchant_with_http_post: redirect_to_merchant_with_http_post
                 .unwrap_or(source.redirect_to_merchant_with_http_post),
-            webhook_details: self.webhook_details,
-            metadata: self.metadata,
-            routing_algorithm: self.routing_algorithm,
-            intent_fulfillment_time: self.intent_fulfillment_time,
-            frm_routing_algorithm: self.frm_routing_algorithm,
-            payout_routing_algorithm: self.payout_routing_algorithm,
-            is_recon_enabled: self.is_recon_enabled.unwrap_or(source.is_recon_enabled),
-            applepay_verified_domains: self.applepay_verified_domains,
+            webhook_details,
+            metadata,
+            routing_algorithm,
+            intent_fulfillment_time,
+            frm_routing_algorithm,
+            payout_routing_algorithm,
+            is_recon_enabled: is_recon_enabled.unwrap_or(source.is_recon_enabled),
+            applepay_verified_domains,
             ..source
         }
     }
diff --git a/crates/diesel_models/src/capture.rs b/crates/diesel_models/src/capture.rs
index 30eee900cff1..adc313ca3dde 100644
--- a/crates/diesel_models/src/capture.rs
+++ b/crates/diesel_models/src/capture.rs
@@ -83,13 +83,24 @@ pub struct CaptureUpdateInternal {
 
 impl CaptureUpdate {
     pub fn apply_changeset(self, source: Capture) -> Capture {
-        let capture_update: CaptureUpdateInternal = self.into();
+        let CaptureUpdateInternal {
+            status,
+            error_message,
+            error_code,
+            error_reason,
+            modified_at: _,
+            connector_capture_id,
+            connector_response_reference_id,
+        } = self.into();
         Capture {
-            status: capture_update.status.unwrap_or(source.status),
-            error_message: capture_update.error_message.or(source.error_message),
-            error_code: capture_update.error_code.or(source.error_code),
-            error_reason: capture_update.error_reason.or(source.error_reason),
+            status: status.unwrap_or(source.status),
+            error_message: error_message.or(source.error_message),
+            error_code: error_code.or(source.error_code),
+            error_reason: error_reason.or(source.error_reason),
             modified_at: common_utils::date_time::now(),
+            connector_capture_id: connector_capture_id.or(source.connector_capture_id),
+            connector_response_reference_id: connector_response_reference_id
+                .or(source.connector_response_reference_id),
             ..source
         }
     }
diff --git a/crates/diesel_models/src/enums.rs b/crates/diesel_models/src/enums.rs
index dc4a7614f587..3f8b37cd03f7 100644
--- a/crates/diesel_models/src/enums.rs
+++ b/crates/diesel_models/src/enums.rs
@@ -15,6 +15,7 @@ pub mod diesel_exports {
     DbPaymentType as PaymentType, DbPayoutStatus as PayoutStatus, DbPayoutType as PayoutType,
     DbProcessTrackerStatus as ProcessTrackerStatus, DbReconStatus as ReconStatus,
     DbRefundStatus as RefundStatus, DbRefundType as RefundType,
+    DbRequestIncrementalAuthorization as RequestIncrementalAuthorization,
     DbRoutingAlgorithmKind as RoutingAlgorithmKind,
 };
 }
@@ -425,3 +426,39 @@ pub enum UserStatus {
     #[default]
     InvitationSent,
 }
+
+#[derive(
+    Clone,
+    Copy,
+    Debug,
+    Eq,
+    PartialEq,
+    serde::Deserialize,
+    serde::Serialize,
+    strum::Display,
+    strum::EnumString,
+    frunk::LabelledGeneric,
+)]
+#[router_derive::diesel_enum(storage_type = "text")]
+#[serde(rename_all = "snake_case")]
+#[strum(serialize_all = "snake_case")]
+pub enum DashboardMetadata {
+    ProductionAgreement,
+    SetupProcessor,
+    ConfigureEndpoint,
+    SetupComplete,
+    FirstProcessorConnected,
+    SecondProcessorConnected,
+    ConfiguredRouting,
+    TestPayment,
+    IntegrationMethod,
+    IntegrationCompleted,
+    StripeConnected,
+    PaypalConnected,
+    SpRoutingConfigured,
+    SpTestPayment,
+    DownloadWoocom,
+    ConfigureWoocom,
+    SetupWoocomWebhook,
+    IsMultipleConfiguration,
+}
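All of these `apply_changeset` rewrites switch from field-by-field `self.x` access to an exhaustive `let Self { .. } = ...` destructuring. With no rest pattern, adding a column to the update struct now fails to compile until the changeset handles it, so fields can no longer be silently dropped during updates (this diff also picks up the previously missed `connector_capture_id` and `connector_response_reference_id` in `Capture`). A toy version of the pattern:

    struct RowUpdate {
        name: Option<String>,
        enabled: Option<bool>,
    }

    struct Row {
        name: String,
        enabled: bool,
    }

    impl RowUpdate {
        fn apply_changeset(self, source: Row) -> Row {
            // Exhaustive destructuring: a new `RowUpdate` field breaks this line
            // at compile time until it is threaded into the changeset below.
            let Self { name, enabled } = self;
            Row {
                name: name.unwrap_or(source.name),
                enabled: enabled.unwrap_or(source.enabled),
            }
        }
    }

    fn main() {
        let row = Row { name: "old".into(), enabled: false };
        let updated = RowUpdate { name: None, enabled: Some(true) }.apply_changeset(row);
        assert_eq!(updated.name, "old");
        assert!(updated.enabled);
    }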
diff --git a/crates/diesel_models/src/payment_attempt.rs b/crates/diesel_models/src/payment_attempt.rs
index 9cc6632c638e..216801fa8fb1 100644
--- a/crates/diesel_models/src/payment_attempt.rs
+++ b/crates/diesel_models/src/payment_attempt.rs
@@ -314,60 +314,83 @@ pub struct PaymentAttemptUpdateInternal {
 
 impl PaymentAttemptUpdate {
     pub fn apply_changeset(self, source: PaymentAttempt) -> PaymentAttempt {
-        let pa_update: PaymentAttemptUpdateInternal = self.into();
+        let PaymentAttemptUpdateInternal {
+            amount,
+            currency,
+            status,
+            connector_transaction_id,
+            amount_to_capture,
+            connector,
+            authentication_type,
+            payment_method,
+            error_message,
+            payment_method_id,
+            cancellation_reason,
+            modified_at: _,
+            mandate_id,
+            browser_info,
+            payment_token,
+            error_code,
+            connector_metadata,
+            payment_method_data,
+            payment_method_type,
+            payment_experience,
+            business_sub_label,
+            straight_through_algorithm,
+            preprocessing_step_id,
+            error_reason,
+            capture_method,
+            connector_response_reference_id,
+            multiple_capture_count,
+            surcharge_amount,
+            tax_amount,
+            amount_capturable,
+            updated_by,
+            merchant_connector_id,
+            authentication_data,
+            encoded_data,
+            unified_code,
+            unified_message,
+        } = self.into();
         PaymentAttempt {
-            amount: pa_update.amount.unwrap_or(source.amount),
-            currency: pa_update.currency.or(source.currency),
-            status: pa_update.status.unwrap_or(source.status),
-            connector_transaction_id: pa_update
-                .connector_transaction_id
-                .or(source.connector_transaction_id),
-            amount_to_capture: pa_update.amount_to_capture.or(source.amount_to_capture),
-            connector: pa_update.connector.or(source.connector),
-            authentication_type: pa_update.authentication_type.or(source.authentication_type),
-            payment_method: pa_update.payment_method.or(source.payment_method),
-            error_message: pa_update.error_message.unwrap_or(source.error_message),
-            payment_method_id: pa_update
-                .payment_method_id
-                .unwrap_or(source.payment_method_id),
-            cancellation_reason: pa_update.cancellation_reason.or(source.cancellation_reason),
+            amount: amount.unwrap_or(source.amount),
+            currency: currency.or(source.currency),
+            status: status.unwrap_or(source.status),
+            connector_transaction_id: connector_transaction_id.or(source.connector_transaction_id),
+            amount_to_capture: amount_to_capture.or(source.amount_to_capture),
+            connector: connector.or(source.connector),
+            authentication_type: authentication_type.or(source.authentication_type),
+            payment_method: payment_method.or(source.payment_method),
+            error_message: error_message.unwrap_or(source.error_message),
+            payment_method_id: payment_method_id.unwrap_or(source.payment_method_id),
+            cancellation_reason: cancellation_reason.or(source.cancellation_reason),
             modified_at: common_utils::date_time::now(),
-            mandate_id: pa_update.mandate_id.or(source.mandate_id),
-            browser_info: pa_update.browser_info.or(source.browser_info),
-            payment_token: pa_update.payment_token.or(source.payment_token),
-            error_code: pa_update.error_code.unwrap_or(source.error_code),
-            connector_metadata: pa_update.connector_metadata.or(source.connector_metadata),
-            payment_method_data: pa_update.payment_method_data.or(source.payment_method_data),
-            payment_method_type: pa_update.payment_method_type.or(source.payment_method_type),
-            payment_experience: pa_update.payment_experience.or(source.payment_experience),
-            business_sub_label: pa_update.business_sub_label.or(source.business_sub_label),
-            straight_through_algorithm: pa_update
-                .straight_through_algorithm
+            mandate_id: mandate_id.or(source.mandate_id),
+            browser_info: browser_info.or(source.browser_info),
+            payment_token: payment_token.or(source.payment_token),
+            error_code: error_code.unwrap_or(source.error_code),
+            connector_metadata: connector_metadata.or(source.connector_metadata),
+            payment_method_data: payment_method_data.or(source.payment_method_data),
+            payment_method_type: payment_method_type.or(source.payment_method_type),
+            payment_experience: payment_experience.or(source.payment_experience),
+            business_sub_label: business_sub_label.or(source.business_sub_label),
+            straight_through_algorithm: straight_through_algorithm
                 .or(source.straight_through_algorithm),
-            preprocessing_step_id: pa_update
-                .preprocessing_step_id
-                .or(source.preprocessing_step_id),
-            error_reason: pa_update.error_reason.unwrap_or(source.error_reason),
-            capture_method: pa_update.capture_method.or(source.capture_method),
-            connector_response_reference_id: pa_update
-                .connector_response_reference_id
+            preprocessing_step_id: preprocessing_step_id.or(source.preprocessing_step_id),
+            error_reason: error_reason.unwrap_or(source.error_reason),
+            capture_method: capture_method.or(source.capture_method),
+            connector_response_reference_id: connector_response_reference_id
                 .or(source.connector_response_reference_id),
-            multiple_capture_count: pa_update
-                .multiple_capture_count
-                .or(source.multiple_capture_count),
-            surcharge_amount: pa_update.surcharge_amount.or(source.surcharge_amount),
-            tax_amount: pa_update.tax_amount.or(source.tax_amount),
-            amount_capturable: pa_update
-                .amount_capturable
-                .unwrap_or(source.amount_capturable),
-            updated_by: pa_update.updated_by,
-            merchant_connector_id: pa_update
-                .merchant_connector_id
-                .or(source.merchant_connector_id),
-            authentication_data: pa_update.authentication_data.or(source.authentication_data),
-            encoded_data: pa_update.encoded_data.or(source.encoded_data),
-            unified_code: pa_update.unified_code.unwrap_or(source.unified_code),
-            unified_message: pa_update.unified_message.unwrap_or(source.unified_message),
+            multiple_capture_count: multiple_capture_count.or(source.multiple_capture_count),
+            surcharge_amount: surcharge_amount.or(source.surcharge_amount),
+            tax_amount: tax_amount.or(source.tax_amount),
+            amount_capturable: amount_capturable.unwrap_or(source.amount_capturable),
+            updated_by,
+            merchant_connector_id: merchant_connector_id.or(source.merchant_connector_id),
+            authentication_data: authentication_data.or(source.authentication_data),
+            encoded_data: encoded_data.or(source.encoded_data),
+            unified_code: unified_code.unwrap_or(source.unified_code),
+            unified_message: unified_message.unwrap_or(source.unified_message),
             ..source
         }
     }
diff --git a/crates/diesel_models/src/payment_intent.rs b/crates/diesel_models/src/payment_intent.rs
index 2ffa857026ba..8d752466103e 100644
--- a/crates/diesel_models/src/payment_intent.rs
+++ b/crates/diesel_models/src/payment_intent.rs
@@ -1,3 +1,4 @@
+use common_enums::RequestIncrementalAuthorization;
 use common_utils::pii;
 use diesel::{AsChangeset, Identifiable, Insertable, Queryable};
 use serde::{Deserialize, Serialize};
@@ -51,6 +52,8 @@ pub struct PaymentIntent {
     pub updated_by: String,
 
     pub surcharge_applicable: Option<bool>,
+    pub request_incremental_authorization: RequestIncrementalAuthorization,
+    pub incremental_authorization_allowed: Option<bool>,
 }
 
 #[derive(
@@ -106,6 +109,8 @@ pub struct PaymentIntentNew {
     pub updated_by: String,
 
     pub surcharge_applicable: Option<bool>,
+    pub request_incremental_authorization: RequestIncrementalAuthorization,
+    pub incremental_authorization_allowed: Option<bool>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -115,6 +120,7 @@ pub enum PaymentIntentUpdate {
         amount_captured: Option<i64>,
         return_url: Option<String>,
         updated_by: String,
+        incremental_authorization_allowed: Option<bool>,
     },
     MetadataUpdate {
         metadata: pii::SecretSerdeValue,
@@ -137,6 +143,7 @@ pub enum PaymentIntentUpdate {
     PGStatusUpdate {
         status: storage_enums::IntentStatus,
         updated_by: String,
+        incremental_authorization_allowed: Option<bool>,
     },
     Update {
         amount: i64,
@@ -213,54 +220,69 @@ pub struct PaymentIntentUpdateInternal {
     pub updated_by: String,
 
     pub surcharge_applicable: Option<bool>,
+    pub incremental_authorization_allowed: Option<bool>,
 }
 
 impl PaymentIntentUpdate {
     pub fn apply_changeset(self, source: PaymentIntent) -> PaymentIntent {
-        let internal_update: PaymentIntentUpdateInternal = self.into();
+        let PaymentIntentUpdateInternal {
+            amount,
+            currency,
+            status,
+            amount_captured,
+            customer_id,
+            return_url,
+            setup_future_usage,
+            off_session,
+            metadata,
+            billing_address_id,
+            shipping_address_id,
+            modified_at: _,
+            active_attempt_id,
+            business_country,
+            business_label,
+            description,
+            statement_descriptor_name,
+            statement_descriptor_suffix,
+            order_details,
+            attempt_count,
+            profile_id,
+            merchant_decision,
+            payment_confirm_source,
+            updated_by,
+            surcharge_applicable,
+            incremental_authorization_allowed,
+        } = self.into();
         PaymentIntent {
-            amount: internal_update.amount.unwrap_or(source.amount),
-            currency: internal_update.currency.or(source.currency),
-            status: internal_update.status.unwrap_or(source.status),
-            amount_captured: internal_update.amount_captured.or(source.amount_captured),
-            customer_id: internal_update.customer_id.or(source.customer_id),
-            return_url: internal_update.return_url.or(source.return_url),
-            setup_future_usage: internal_update
-                .setup_future_usage
-                .or(source.setup_future_usage),
-            off_session: internal_update.off_session.or(source.off_session),
-            metadata: internal_update.metadata.or(source.metadata),
-            billing_address_id: internal_update
-                .billing_address_id
-                .or(source.billing_address_id),
-            shipping_address_id: internal_update
-                .shipping_address_id
-                .or(source.shipping_address_id),
+            amount: amount.unwrap_or(source.amount),
+            currency: currency.or(source.currency),
+            status: status.unwrap_or(source.status),
+            amount_captured: amount_captured.or(source.amount_captured),
+            customer_id: customer_id.or(source.customer_id),
+            return_url: return_url.or(source.return_url),
+            setup_future_usage: setup_future_usage.or(source.setup_future_usage),
+            off_session: off_session.or(source.off_session),
+            metadata: metadata.or(source.metadata),
+            billing_address_id: billing_address_id.or(source.billing_address_id),
+            shipping_address_id: shipping_address_id.or(source.shipping_address_id),
             modified_at: common_utils::date_time::now(),
-            active_attempt_id: internal_update
-                .active_attempt_id
-                .unwrap_or(source.active_attempt_id),
-            business_country: internal_update.business_country.or(source.business_country),
-            business_label: internal_update.business_label.or(source.business_label),
-            description: internal_update.description.or(source.description),
-            statement_descriptor_name: internal_update
-                .statement_descriptor_name
+            active_attempt_id: active_attempt_id.unwrap_or(source.active_attempt_id),
+            business_country: business_country.or(source.business_country),
+            business_label: business_label.or(source.business_label),
+            description: description.or(source.description),
+            statement_descriptor_name: statement_descriptor_name
                 .or(source.statement_descriptor_name),
-            statement_descriptor_suffix: internal_update
-                .statement_descriptor_suffix
+            statement_descriptor_suffix: statement_descriptor_suffix
                 .or(source.statement_descriptor_suffix),
-            order_details: internal_update.order_details.or(source.order_details),
-            attempt_count: internal_update
-                .attempt_count
-                .unwrap_or(source.attempt_count),
-            profile_id: internal_update.profile_id.or(source.profile_id),
-            merchant_decision: internal_update
-                .merchant_decision
-                .or(source.merchant_decision),
-            payment_confirm_source: internal_update
-                .payment_confirm_source
-                .or(source.payment_confirm_source),
-            updated_by: internal_update.updated_by,
+            order_details: order_details.or(source.order_details),
+            attempt_count: attempt_count.unwrap_or(source.attempt_count),
+            profile_id: profile_id.or(source.profile_id),
+            merchant_decision: merchant_decision.or(source.merchant_decision),
+            payment_confirm_source: payment_confirm_source.or(source.payment_confirm_source),
+            updated_by,
+            surcharge_applicable: surcharge_applicable.or(source.surcharge_applicable),
+
+            incremental_authorization_allowed,
             ..source
         }
     }
@@ -334,10 +356,15 @@ impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
                 updated_by,
                 ..Default::default()
             },
-            PaymentIntentUpdate::PGStatusUpdate { status, updated_by } => Self {
+            PaymentIntentUpdate::PGStatusUpdate {
+                status,
+                updated_by,
+                incremental_authorization_allowed,
+            } => Self {
                 status: Some(status),
                 modified_at: Some(common_utils::date_time::now()),
                 updated_by,
+                incremental_authorization_allowed,
                 ..Default::default()
             },
             PaymentIntentUpdate::MerchantStatusUpdate {
@@ -361,6 +388,7 @@ impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
                 // customer_id,
                 return_url,
                 updated_by,
+                incremental_authorization_allowed,
             } => Self {
                 // amount,
                 // currency: Some(currency),
@@ -370,6 +398,7 @@ impl From<PaymentIntentUpdate> for PaymentIntentUpdateInternal {
                 return_url,
                 modified_at: Some(common_utils::date_time::now()),
                 updated_by,
+                incremental_authorization_allowed,
                 ..Default::default()
             },
             PaymentIntentUpdate::PaymentAttemptAndAttemptCountUpdate {
diff --git a/crates/diesel_models/src/query.rs b/crates/diesel_models/src/query.rs
index cf5a993c2686..b0537d0a287b 100644
--- a/crates/diesel_models/src/query.rs
+++ b/crates/diesel_models/src/query.rs
@@ -6,6 +6,7 @@ pub mod cards_info;
 pub mod configs;
 pub mod customers;
+pub mod dashboard_metadata;
 pub mod dispute;
 pub mod events;
 pub mod file;
diff --git a/crates/diesel_models/src/query/dashboard_metadata.rs b/crates/diesel_models/src/query/dashboard_metadata.rs
new file mode 100644
index 000000000000..44fd24c7acf2
--- /dev/null
+++ b/crates/diesel_models/src/query/dashboard_metadata.rs
@@ -0,0 +1,92 @@
+use diesel::{associations::HasTable, BoolExpressionMethods, ExpressionMethods};
+use router_env::tracing::{self, instrument};
+
+use crate::{
+    enums,
+    query::generics,
+    schema::dashboard_metadata::dsl,
+    user::dashboard_metadata::{
+        DashboardMetadata, DashboardMetadataNew, DashboardMetadataUpdate,
+        DashboardMetadataUpdateInternal,
+    },
+    PgPooledConn, StorageResult,
+};
+
+impl DashboardMetadataNew {
+    #[instrument(skip(conn))]
+    pub async fn insert(self, conn: &PgPooledConn) -> StorageResult<DashboardMetadata> {
+        generics::generic_insert(conn, self).await
+    }
+}
+
+impl DashboardMetadata {
+    pub async fn update(
+        conn: &PgPooledConn,
+        user_id: Option<String>,
+        merchant_id: String,
+        org_id: String,
+        data_key: enums::DashboardMetadata,
+        dashboard_metadata_update: DashboardMetadataUpdate,
+    ) -> StorageResult<Self> {
+        generics::generic_update_with_unique_predicate_get_result::<
+            <Self as HasTable>::Table,
+            _,
+            _,
+            _,
+        >(
+            conn,
+            dsl::user_id
+                .eq(user_id.to_owned())
+                .and(dsl::merchant_id.eq(merchant_id.to_owned()))
+                .and(dsl::org_id.eq(org_id.to_owned()))
+                .and(dsl::data_key.eq(data_key.to_owned())),
+            DashboardMetadataUpdateInternal::from(dashboard_metadata_update),
+        )
+        .await
+    }
+
+    pub async fn find_user_scoped_dashboard_metadata(
+        conn: &PgPooledConn,
+        user_id: String,
+        merchant_id: String,
+        org_id: String,
+        data_types: Vec<enums::DashboardMetadata>,
+    ) -> StorageResult<Vec<Self>> {
+        let predicate = dsl::user_id
+            .eq(user_id)
+            .and(dsl::merchant_id.eq(merchant_id))
+            .and(dsl::org_id.eq(org_id))
+            .and(dsl::data_key.eq_any(data_types));
+
+        generics::generic_filter::<<Self as HasTable>::Table, _, _, _>(
+            conn,
+            predicate,
+            None,
+            None,
+            Some(dsl::last_modified_at.asc()),
+        )
+        .await
+    }
+
+    pub async fn find_merchant_scoped_dashboard_metadata(
+        conn: &PgPooledConn,
+        merchant_id: String,
+        org_id: String,
+        data_types: Vec<enums::DashboardMetadata>,
+    ) -> StorageResult<Vec<Self>> {
+        let predicate = dsl::user_id
+            .is_null()
+            .and(dsl::merchant_id.eq(merchant_id))
+            .and(dsl::org_id.eq(org_id))
+            .and(dsl::data_key.eq_any(data_types));
+
+        generics::generic_filter::<<Self as HasTable>::Table, _, _, _>(
+            conn,
+            predicate,
+            None,
+            None,
+            Some(dsl::last_modified_at.asc()),
+        )
+        .await
+    }
+}
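The two finders split metadata by scope: user-scoped rows match a concrete `user_id`, while merchant-scoped rows are the ones stored with a NULL `user_id`. A sketch of how a caller might read a user's onboarding progress with the API above (the wrapper function is hypothetical; it reuses the imports from the module itself, and connection acquisition and error handling are elided):

    // Hypothetical caller of the new query API.
    async fn onboarding_progress(
        conn: &PgPooledConn,
        user_id: String,
        merchant_id: String,
        org_id: String,
    ) -> StorageResult<Vec<DashboardMetadata>> {
        DashboardMetadata::find_user_scoped_dashboard_metadata(
            conn,
            user_id,
            merchant_id,
            org_id,
            vec![
                enums::DashboardMetadata::SetupComplete,
                enums::DashboardMetadata::TestPayment,
            ],
        )
        .await
    }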
diff --git a/crates/diesel_models/src/query/user.rs b/crates/diesel_models/src/query/user.rs
index 5761d8af814d..b4d5976ba294 100644
--- a/crates/diesel_models/src/query/user.rs
+++ b/crates/diesel_models/src/query/user.rs
@@ -1,12 +1,24 @@
-use diesel::{associations::HasTable, ExpressionMethods};
-use error_stack::report;
-use router_env::tracing::{self, instrument};
+use async_bb8_diesel::AsyncRunQueryDsl;
+use diesel::{
+    associations::HasTable, debug_query, result::Error as DieselError, ExpressionMethods,
+    JoinOnDsl, QueryDsl,
+};
+use error_stack::{report, IntoReport};
+use router_env::{
+    logger,
+    tracing::{self, instrument},
+};
+
+pub mod sample_data;
 
 use crate::{
     errors::{self},
     query::generics,
-    schema::users::dsl,
+    schema::{
+        user_roles::{self, dsl as user_roles_dsl},
+        users::dsl as users_dsl,
+    },
     user::*,
+    user_role::UserRole,
     PgPooledConn, StorageResult,
 };
 
@@ -21,7 +33,7 @@ impl User {
     pub async fn find_by_user_email(conn: &PgPooledConn, user_email: &str) -> StorageResult<Self> {
         generics::generic_find_one::<<Self as HasTable>::Table, _, _>(
             conn,
-            dsl::email.eq(user_email.to_owned()),
+            users_dsl::email.eq(user_email.to_owned()),
         )
         .await
     }
@@ -29,7 +41,7 @@ impl User {
     pub async fn find_by_user_id(conn: &PgPooledConn, user_id: &str) -> StorageResult<Self> {
         generics::generic_find_one::<<Self as HasTable>::Table, _, _>(
             conn,
-            dsl::user_id.eq(user_id.to_owned()),
+            users_dsl::user_id.eq(user_id.to_owned()),
         )
         .await
     }
@@ -41,7 +53,7 @@ impl User {
     ) -> StorageResult<Self> {
         generics::generic_update_with_results::<<Self as HasTable>::Table, _, _, _>(
             conn,
-            dsl::user_id.eq(user_id.to_owned()),
+            users_dsl::user_id.eq(user_id.to_owned()),
             UserUpdateInternal::from(user),
         )
         .await?
@@ -55,8 +67,28 @@ impl User {
     pub async fn delete_by_user_id(conn: &PgPooledConn, user_id: &str) -> StorageResult<bool> {
         generics::generic_delete::<<Self as HasTable>::Table, _>(
             conn,
-            dsl::user_id.eq(user_id.to_owned()),
+            users_dsl::user_id.eq(user_id.to_owned()),
         )
         .await
     }
+
+    pub async fn find_joined_users_and_roles_by_merchant_id(
+        conn: &PgPooledConn,
+        mid: &str,
+    ) -> StorageResult<Vec<(Self, UserRole)>> {
+        let query = Self::table()
+            .inner_join(user_roles::table.on(user_roles_dsl::user_id.eq(users_dsl::user_id)))
+            .filter(user_roles_dsl::merchant_id.eq(mid.to_owned()));
+
+        logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+        query
+            .get_results_async::<(Self, UserRole)>(conn)
+            .await
+            .into_report()
+            .map_err(|err| match err.current_context() {
+                DieselError::NotFound => err.change_context(errors::DatabaseError::NotFound),
+                _ => err.change_context(errors::DatabaseError::Others),
+            })
+    }
 }
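`find_joined_users_and_roles_by_merchant_id` logs the generated statement through `debug_query` before executing; for this query plan the logged SQL should look roughly like the following (approximate, for intuition only — the exact text is produced by diesel):

    // SELECT users.*, user_roles.*
    // FROM users
    // INNER JOIN user_roles ON user_roles.user_id = users.user_id
    // WHERE user_roles.merchant_id = $1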
diff --git a/crates/diesel_models/src/query/user/sample_data.rs b/crates/diesel_models/src/query/user/sample_data.rs
new file mode 100644
index 000000000000..a8ec2c3b0a4f
--- /dev/null
+++ b/crates/diesel_models/src/query/user/sample_data.rs
@@ -0,0 +1,139 @@
+use async_bb8_diesel::AsyncRunQueryDsl;
+use diesel::{associations::HasTable, debug_query, ExpressionMethods, TextExpressionMethods};
+use error_stack::{IntoReport, ResultExt};
+use router_env::logger;
+
+use crate::{
+    errors,
+    schema::{
+        payment_attempt::dsl as payment_attempt_dsl, payment_intent::dsl as payment_intent_dsl,
+        refund::dsl as refund_dsl,
+    },
+    user::sample_data::PaymentAttemptBatchNew,
+    PaymentAttempt, PaymentIntent, PaymentIntentNew, PgPooledConn, Refund, RefundNew,
+    StorageResult,
+};
+
+pub async fn insert_payment_intents(
+    conn: &PgPooledConn,
+    batch: Vec<PaymentIntentNew>,
+) -> StorageResult<Vec<PaymentIntent>> {
+    let query = diesel::insert_into(<PaymentIntent as HasTable>::table()).values(batch);
+
+    logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+    query
+        .get_results_async(conn)
+        .await
+        .into_report()
+        .change_context(errors::DatabaseError::Others)
+        .attach_printable("Error while inserting payment intents")
+}
+pub async fn insert_payment_attempts(
+    conn: &PgPooledConn,
+    batch: Vec<PaymentAttemptBatchNew>,
+) -> StorageResult<Vec<PaymentAttempt>> {
+    let query = diesel::insert_into(<PaymentAttempt as HasTable>::table()).values(batch);
+
+    logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+    query
+        .get_results_async(conn)
+        .await
+        .into_report()
+        .change_context(errors::DatabaseError::Others)
+        .attach_printable("Error while inserting payment attempts")
+}
+
+pub async fn insert_refunds(
+    conn: &PgPooledConn,
+    batch: Vec<RefundNew>,
+) -> StorageResult<Vec<Refund>> {
+    let query = diesel::insert_into(<Refund as HasTable>::table()).values(batch);
+
+    logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+    query
+        .get_results_async(conn)
+        .await
+        .into_report()
+        .change_context(errors::DatabaseError::Others)
+        .attach_printable("Error while inserting refunds")
+}
+
+pub async fn delete_payment_intents(
+    conn: &PgPooledConn,
+    merchant_id: &str,
+) -> StorageResult<Vec<PaymentIntent>> {
+    let query = diesel::delete(<PaymentIntent as HasTable>::table())
+        .filter(payment_intent_dsl::merchant_id.eq(merchant_id.to_owned()))
+        .filter(payment_intent_dsl::payment_id.like("test_%"));
+
+    logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+    query
+        .get_results_async(conn)
+        .await
+        .into_report()
+        .change_context(errors::DatabaseError::Others)
+        .attach_printable("Error while deleting payment intents")
+        .and_then(|result| match result.len() {
+            n if n > 0 => {
+                logger::debug!("{n} records deleted");
+                Ok(result)
+            }
+            0 => Err(error_stack::report!(errors::DatabaseError::NotFound)
+                .attach_printable("No records deleted")),
+            _ => Ok(result),
+        })
+}
+pub async fn delete_payment_attempts(
+    conn: &PgPooledConn,
+    merchant_id: &str,
+) -> StorageResult<Vec<PaymentAttempt>> {
+    let query = diesel::delete(<PaymentAttempt as HasTable>::table())
+        .filter(payment_attempt_dsl::merchant_id.eq(merchant_id.to_owned()))
+        .filter(payment_attempt_dsl::payment_id.like("test_%"));
+
+    logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+    query
+        .get_results_async(conn)
+        .await
+        .into_report()
+        .change_context(errors::DatabaseError::Others)
+        .attach_printable("Error while deleting payment attempts")
+        .and_then(|result| match result.len() {
+            n if n > 0 => {
+                logger::debug!("{n} records deleted");
+                Ok(result)
+            }
+            0 => Err(error_stack::report!(errors::DatabaseError::NotFound)
+                .attach_printable("No records deleted")),
+            _ => Ok(result),
+        })
+}
+
+pub async fn delete_refunds(conn: &PgPooledConn, merchant_id: &str) -> StorageResult<Vec<Refund>> {
+    let query = diesel::delete(<Refund as HasTable>::table())
+        .filter(refund_dsl::merchant_id.eq(merchant_id.to_owned()))
+        .filter(refund_dsl::payment_id.like("test_%"));
+
+    logger::debug!(query = %debug_query::<diesel::pg::Pg, _>(&query).to_string());
+
+    query
+        .get_results_async(conn)
+        .await
+        .into_report()
+        .change_context(errors::DatabaseError::Others)
+        .attach_printable("Error while deleting refunds")
+        .and_then(|result| match result.len() {
+            n if n > 0 => {
+                logger::debug!("{n} records deleted");
+                Ok(result)
+            }
+            0 => Err(error_stack::report!(errors::DatabaseError::NotFound)
+                .attach_printable("No records deleted")),
+            _ => Ok(result),
+        })
+}
diff --git a/crates/diesel_models/src/refund.rs b/crates/diesel_models/src/refund.rs
index 62aec3fb27d8..bb805fb646c5 100644
--- a/crates/diesel_models/src/refund.rs
+++ b/crates/diesel_models/src/refund.rs
@@ -202,19 +202,27 @@ impl From<RefundUpdate> for RefundUpdateInternal {
 
 impl RefundUpdate {
     pub fn apply_changeset(self, source: Refund) -> Refund {
-        let pa_update: RefundUpdateInternal = self.into();
+        let RefundUpdateInternal {
+            connector_refund_id,
+            refund_status,
+            sent_to_gateway,
+            refund_error_message,
+            refund_arn,
+            metadata,
+            refund_reason,
+            refund_error_code,
+            updated_by,
+        } = self.into();
         Refund {
-            connector_refund_id: pa_update.connector_refund_id.or(source.connector_refund_id),
-            refund_status: pa_update.refund_status.unwrap_or(source.refund_status),
-            sent_to_gateway: pa_update.sent_to_gateway.unwrap_or(source.sent_to_gateway),
-            refund_error_message: pa_update
-                .refund_error_message
-                .or(source.refund_error_message),
-            refund_error_code: pa_update.refund_error_code.or(source.refund_error_code),
-            refund_arn: pa_update.refund_arn.or(source.refund_arn),
-            metadata: pa_update.metadata.or(source.metadata),
-            refund_reason: pa_update.refund_reason.or(source.refund_reason),
-            updated_by: pa_update.updated_by,
+            connector_refund_id: connector_refund_id.or(source.connector_refund_id),
+            refund_status: refund_status.unwrap_or(source.refund_status),
+            sent_to_gateway: sent_to_gateway.unwrap_or(source.sent_to_gateway),
+            refund_error_message: refund_error_message.or(source.refund_error_message),
+            refund_error_code: refund_error_code.or(source.refund_error_code),
+            refund_arn: refund_arn.or(source.refund_arn),
+            metadata: metadata.or(source.metadata),
+            refund_reason: refund_reason.or(source.refund_reason),
+            updated_by,
             ..source
         }
     }
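All three delete helpers pair the `merchant_id` filter with `.like("test_%")`, so bulk deletion can only ever touch generated sample records whose ids carry the `test_` prefix. The guard in isolation:

    // Mirror of the `.like("test_%")` predicate: only generated sample records qualify.
    fn is_sample_record(payment_id: &str) -> bool {
        payment_id.starts_with("test_")
    }

    fn main() {
        assert!(is_sample_record("test_1a2b3c"));
        assert!(!is_sample_record("pay_1a2b3c"));
    }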
diff --git a/crates/diesel_models/src/schema.rs b/crates/diesel_models/src/schema.rs
index 33400635f052..13b001ecc6d1 100644
--- a/crates/diesel_models/src/schema.rs
+++ b/crates/diesel_models/src/schema.rs
@@ -183,6 +183,30 @@ diesel::table! {
     }
 }
 
+diesel::table! {
+    use diesel::sql_types::*;
+    use crate::enums::diesel_exports::*;
+
+    dashboard_metadata (id) {
+        id -> Int4,
+        #[max_length = 64]
+        user_id -> Nullable<Varchar>,
+        #[max_length = 64]
+        merchant_id -> Varchar,
+        #[max_length = 64]
+        org_id -> Varchar,
+        #[max_length = 64]
+        data_key -> Varchar,
+        data_value -> Json,
+        #[max_length = 64]
+        created_by -> Varchar,
+        created_at -> Timestamp,
+        #[max_length = 64]
+        last_modified_by -> Varchar,
+        last_modified_at -> Timestamp,
+    }
+}
+
 diesel::table! {
     use diesel::sql_types::*;
     use crate::enums::diesel_exports::*;
@@ -654,6 +678,8 @@ diesel::table! {
         #[max_length = 32]
         updated_by -> Varchar,
         surcharge_applicable -> Nullable<Bool>,
+        request_incremental_authorization -> RequestIncrementalAuthorization,
+        incremental_authorization_allowed -> Nullable<Bool>,
     }
 }
@@ -965,6 +991,7 @@ diesel::allow_tables_to_appear_in_same_query!(
     cards_info,
     configs,
     customers,
+    dashboard_metadata,
     dispute,
     events,
     file_metadata,
diff --git a/crates/diesel_models/src/user.rs b/crates/diesel_models/src/user.rs
index 6a2e864b291c..c608f2654c6a 100644
--- a/crates/diesel_models/src/user.rs
+++ b/crates/diesel_models/src/user.rs
@@ -5,6 +5,9 @@ use time::PrimitiveDateTime;
 
 use crate::schema::users;
 
+pub mod dashboard_metadata;
+
+pub mod sample_data;
 #[derive(Clone, Debug, Identifiable, Queryable)]
 #[diesel(table_name = users)]
 pub struct User {
diff --git a/crates/diesel_models/src/user/dashboard_metadata.rs b/crates/diesel_models/src/user/dashboard_metadata.rs
new file mode 100644
index 000000000000..1eeb61d6135e
--- /dev/null
+++ b/crates/diesel_models/src/user/dashboard_metadata.rs
@@ -0,0 +1,72 @@
+use diesel::{query_builder::AsChangeset, Identifiable, Insertable, Queryable};
+use time::PrimitiveDateTime;
+
+use crate::{enums, schema::dashboard_metadata};
+
+#[derive(Clone, Debug, Identifiable, Queryable)]
+#[diesel(table_name = dashboard_metadata)]
+pub struct DashboardMetadata {
+    pub id: i32,
+    pub user_id: Option<String>,
+    pub merchant_id: String,
+    pub org_id: String,
+    pub data_key: enums::DashboardMetadata,
+    pub data_value: serde_json::Value,
+    pub created_by: String,
+    pub created_at: PrimitiveDateTime,
+    pub last_modified_by: String,
+    pub last_modified_at: PrimitiveDateTime,
+}
+
+#[derive(
+    router_derive::Setter, Clone, Debug, Insertable, router_derive::DebugAsDisplay, AsChangeset,
+)]
+#[diesel(table_name = dashboard_metadata)]
+pub struct DashboardMetadataNew {
+    pub user_id: Option<String>,
+    pub merchant_id: String,
+    pub org_id: String,
+    pub data_key: enums::DashboardMetadata,
+    pub data_value: serde_json::Value,
+    pub created_by: String,
+    pub created_at: PrimitiveDateTime,
+    pub last_modified_by: String,
+    pub last_modified_at: PrimitiveDateTime,
+}
+
+#[derive(
+    router_derive::Setter, Clone, Debug, Insertable, router_derive::DebugAsDisplay, AsChangeset,
+)]
+#[diesel(table_name = dashboard_metadata)]
+pub struct DashboardMetadataUpdateInternal {
+    pub data_key: enums::DashboardMetadata,
+    pub data_value: serde_json::Value,
+    pub last_modified_by: String,
+    pub last_modified_at: PrimitiveDateTime,
+}
+
+pub enum DashboardMetadataUpdate {
+    UpdateData {
+        data_key: enums::DashboardMetadata,
+        data_value: serde_json::Value,
+        last_modified_by: String,
+    },
+}
+
+impl From<DashboardMetadataUpdate> for DashboardMetadataUpdateInternal {
+    fn from(metadata_update: DashboardMetadataUpdate) -> Self {
+        let last_modified_at = common_utils::date_time::now();
+        match metadata_update {
+            DashboardMetadataUpdate::UpdateData {
+                data_key,
+                data_value,
+                last_modified_by,
+            } => Self {
+                data_key,
+                data_value,
+                last_modified_by,
+                last_modified_at,
+            },
+        }
+    }
+}
} => Self { + data_key, + data_value, + last_modified_by, + last_modified_at, + }, + } + } +} diff --git a/crates/diesel_models/src/user/sample_data.rs b/crates/diesel_models/src/user/sample_data.rs new file mode 100644 index 000000000000..959d1ad9ee7e --- /dev/null +++ b/crates/diesel_models/src/user/sample_data.rs @@ -0,0 +1,119 @@ +use common_enums::{ + AttemptStatus, AuthenticationType, CaptureMethod, Currency, PaymentExperience, PaymentMethod, + PaymentMethodType, +}; +use serde::{Deserialize, Serialize}; +use time::PrimitiveDateTime; + +use crate::{enums::MandateDataType, schema::payment_attempt, PaymentAttemptNew}; + +#[derive( + Clone, Debug, Default, diesel::Insertable, router_derive::DebugAsDisplay, Serialize, Deserialize, +)] +#[diesel(table_name = payment_attempt)] +pub struct PaymentAttemptBatchNew { + pub payment_id: String, + pub merchant_id: String, + pub attempt_id: String, + pub status: AttemptStatus, + pub amount: i64, + pub currency: Option, + pub save_to_locker: Option, + pub connector: Option, + pub error_message: Option, + pub offer_amount: Option, + pub surcharge_amount: Option, + pub tax_amount: Option, + pub payment_method_id: Option, + pub payment_method: Option, + pub capture_method: Option, + #[serde(default, with = "common_utils::custom_serde::iso8601::option")] + pub capture_on: Option, + pub confirm: bool, + pub authentication_type: Option, + #[serde(default, with = "common_utils::custom_serde::iso8601::option")] + pub created_at: Option, + #[serde(default, with = "common_utils::custom_serde::iso8601::option")] + pub modified_at: Option, + #[serde(default, with = "common_utils::custom_serde::iso8601::option")] + pub last_synced: Option, + pub cancellation_reason: Option, + pub amount_to_capture: Option, + pub mandate_id: Option, + pub browser_info: Option, + pub payment_token: Option, + pub error_code: Option, + pub connector_metadata: Option, + pub payment_experience: Option, + pub payment_method_type: Option, + pub payment_method_data: Option, + pub business_sub_label: Option, + pub straight_through_algorithm: Option, + pub preprocessing_step_id: Option, + pub mandate_details: Option, + pub error_reason: Option, + pub connector_response_reference_id: Option, + pub connector_transaction_id: Option, + pub multiple_capture_count: Option, + pub amount_capturable: i64, + pub updated_by: String, + pub merchant_connector_id: Option, + pub authentication_data: Option, + pub encoded_data: Option, + pub unified_code: Option, + pub unified_message: Option, +} + +#[allow(dead_code)] +impl PaymentAttemptBatchNew { + // Used to verify compatibility with PaymentAttemptTable + fn convert_into_normal_attempt_insert(self) -> PaymentAttemptNew { + PaymentAttemptNew { + payment_id: self.payment_id, + merchant_id: self.merchant_id, + attempt_id: self.attempt_id, + status: self.status, + amount: self.amount, + currency: self.currency, + save_to_locker: self.save_to_locker, + connector: self.connector, + error_message: self.error_message, + offer_amount: self.offer_amount, + surcharge_amount: self.surcharge_amount, + tax_amount: self.tax_amount, + payment_method_id: self.payment_method_id, + payment_method: self.payment_method, + capture_method: self.capture_method, + capture_on: self.capture_on, + confirm: self.confirm, + authentication_type: self.authentication_type, + created_at: self.created_at, + modified_at: self.modified_at, + last_synced: self.last_synced, + cancellation_reason: self.cancellation_reason, + amount_to_capture: self.amount_to_capture, + mandate_id: 
self.mandate_id, + browser_info: self.browser_info, + payment_token: self.payment_token, + error_code: self.error_code, + connector_metadata: self.connector_metadata, + payment_experience: self.payment_experience, + payment_method_type: self.payment_method_type, + payment_method_data: self.payment_method_data, + business_sub_label: self.business_sub_label, + straight_through_algorithm: self.straight_through_algorithm, + preprocessing_step_id: self.preprocessing_step_id, + mandate_details: self.mandate_details, + error_reason: self.error_reason, + multiple_capture_count: self.multiple_capture_count, + connector_response_reference_id: self.connector_response_reference_id, + amount_capturable: self.amount_capturable, + updated_by: self.updated_by, + merchant_connector_id: self.merchant_connector_id, + authentication_data: self.authentication_data, + encoded_data: self.encoded_data, + unified_code: self.unified_code, + unified_message: self.unified_message, + } + } +} diff --git a/crates/euclid_wasm/src/lib.rs b/crates/euclid_wasm/src/lib.rs index cab82f8ce411..78c7677fe75c 100644 --- a/crates/euclid_wasm/src/lib.rs +++ b/crates/euclid_wasm/src/lib.rs @@ -254,12 +254,25 @@ pub fn add_two(n1: i64, n2: i64) -> i64 { } #[wasm_bindgen(js_name = getDescriptionCategory)] -pub fn get_description_category(key: &str) -> JsResult { - let key = dir::DirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?; +pub fn get_description_category() -> JsResult { + let keys = dir::DirKeyKind::VARIANTS + .iter() + .copied() + .filter(|s| s != &"Connector") + .collect::>(); + let mut category: HashMap, Vec>> = HashMap::new(); + for key in keys { + let dir_key = + dir::DirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?; + let details = types::Details { + description: dir_key.get_detailed_message(), + kind: dir_key.clone(), + }; + category + .entry(dir_key.get_str("Category")) + .and_modify(|val| val.push(details.clone())) + .or_insert(vec![details]); + } - let result = types::Details { - description: key.get_detailed_message(), - category: key.get_str("Category"), - }; - Ok(serde_wasm_bindgen::to_value(&result)?) + Ok(serde_wasm_bindgen::to_value(&category)?) 
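The rewritten `getDescriptionCategory` above folds every `DirKeyKind` variant into a category map using the `HashMap` entry API. A minimal standalone sketch of that grouping pattern, with a hypothetical `Item` type and category labels in place of the crate's types:

```rust
use std::collections::HashMap;

#[derive(Clone, Debug)]
struct Item {
    kind: &'static str,
    description: &'static str,
}

/// Group items under an optional category label, mirroring the
/// `entry().and_modify().or_insert()` chain used above.
fn group_by_category(
    items: Vec<(Option<&'static str>, Item)>,
) -> HashMap<Option<&'static str>, Vec<Item>> {
    let mut grouped: HashMap<Option<&'static str>, Vec<Item>> = HashMap::new();
    for (category, item) in items {
        grouped
            .entry(category)
            .and_modify(|val| val.push(item.clone()))
            .or_insert(vec![item]);
    }
    grouped
}

fn main() {
    let items = vec![
        (Some("Payment"), Item { kind: "PaymentMethod", description: "The payment method" }),
        (Some("Payment"), Item { kind: "PaymentCurrency", description: "The currency" }),
        (None, Item { kind: "Misc", description: "Uncategorised" }),
    ];
    let grouped = group_by_category(items);
    assert_eq!(grouped[&Some("Payment")].len(), 2);
}
```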
} diff --git a/crates/euclid_wasm/src/types.rs b/crates/euclid_wasm/src/types.rs index ea40449971bc..6353d9009c36 100644 --- a/crates/euclid_wasm/src/types.rs +++ b/crates/euclid_wasm/src/types.rs @@ -1,7 +1,8 @@ +use euclid::frontend::dir::DirKeyKind; use serde::Serialize; #[derive(Serialize, Clone)] pub struct Details<'a> { pub description: Option<&'a str>, - pub category: Option<&'a str>, + pub kind: DirKeyKind, } diff --git a/crates/external_services/Cargo.toml b/crates/external_services/Cargo.toml index 4700c2a81d75..54a636a382b2 100644 --- a/crates/external_services/Cargo.toml +++ b/crates/external_services/Cargo.toml @@ -16,6 +16,7 @@ async-trait = "0.1.68" aws-config = { version = "0.55.3", optional = true } aws-sdk-kms = { version = "0.28.0", optional = true } aws-sdk-sesv2 = "0.28.0" +aws-sdk-sts = "0.28.0" aws-smithy-client = "0.55.3" base64 = "0.21.2" dyn-clone = "1.0.11" @@ -24,6 +25,8 @@ once_cell = "1.18.0" serde = { version = "1.0.163", features = ["derive"] } thiserror = "1.0.40" tokio = "1.28.2" +hyper-proxy = "0.9.1" +hyper = "0.14.26" # First party crates common_utils = { version = "0.1.0", path = "../common_utils" } diff --git a/crates/external_services/src/email.rs b/crates/external_services/src/email.rs index b2bf99d8e01d..1d389f58298a 100644 --- a/crates/external_services/src/email.rs +++ b/crates/external_services/src/email.rs @@ -1,127 +1,163 @@ //! Interactions with the AWS SES SDK -use aws_config::meta::region::RegionProviderChain; -use aws_sdk_sesv2::{ - config::Region, - operation::send_email::SendEmailError, - types::{Body, Content, Destination, EmailContent, Message}, - Client, -}; +use aws_sdk_sesv2::types::Body; use common_utils::{errors::CustomResult, pii}; -use error_stack::{IntoReport, ResultExt}; -use masking::PeekInterface; use serde::Deserialize; +/// Implementation of aws ses client +pub mod ses; + /// Custom Result type alias for Email operations. pub type EmailResult = CustomResult; /// A trait that defines the methods that must be implemented to send email. #[async_trait::async_trait] pub trait EmailClient: Sync + Send + dyn_clone::DynClone { + /// The rich text type of the email client + type RichText; + /// Sends an email to the specified recipient with the given subject and body. 
     async fn send_email(
         &self,
         recipient: pii::Email,
         subject: String,
-        body: String,
+        body: Self::RichText,
+        proxy_url: Option<&String>,
+    ) -> EmailResult<()>;
+
+    /// Convert stringified HTML to the client's native rich text format.
+    /// This is needed because not all clients format HTML the same way.
+    fn convert_to_rich_text(
+        &self,
+        intermediate_string: IntermediateString,
+    ) -> CustomResult<Self::RichText, EmailError>
+    where
+        Self::RichText: Send;
+}
+
+/// A super trait which is automatically implemented for all EmailClients
+#[async_trait::async_trait]
+pub trait EmailService: Sync + Send + dyn_clone::DynClone {
+    /// Compose and send email using the email data
+    async fn compose_and_send_email(
+        &self,
+        email_data: Box<dyn EmailData + Send>,
+        proxy_url: Option<&String>,
     ) -> EmailResult<()>;
 }
 
-dyn_clone::clone_trait_object!(EmailClient);
+#[async_trait::async_trait]
+impl<T> EmailService for T
+where
+    T: EmailClient,
+    <T as EmailClient>::RichText: Send,
+{
+    async fn compose_and_send_email(
+        &self,
+        email_data: Box<dyn EmailData + Send>,
+        proxy_url: Option<&String>,
+    ) -> EmailResult<()> {
+        let email_data = email_data.get_email_data();
+        let email_data = email_data.await?;
+
+        let EmailContents {
+            subject,
+            body,
+            recipient,
+        } = email_data;
+
+        let rich_text_string = self.convert_to_rich_text(body)?;
+
+        self.send_email(recipient, subject, rich_text_string, proxy_url)
+            .await
+    }
+}
+
+/// A wrapper used to build the intermediate string for rich text (HTML)
+#[derive(Debug)]
+pub struct IntermediateString(String);
+
+impl IntermediateString {
+    /// Create a new instance of IntermediateString from a String
+    pub fn new(inner: String) -> Self {
+        Self(inner)
+    }
+
+    /// Get the inner String
+    pub fn into_inner(self) -> String {
+        self.0
+    }
+}
+
+/// The composed contents of an email: subject, body, and recipient
+#[derive(Debug)]
+pub struct EmailContents {
+    /// The subject of the email
+    pub subject: String,
+
+    /// The intermediate representation of the email body in a generic format.
+    /// Email clients can convert this intermediate representation to their client-specific rich text format.
+    pub body: IntermediateString,
+
+    /// The email address of the recipient to whom the email is to be sent
+    pub recipient: pii::Email,
+}
+
+/// A trait that contains the logic for generating the email subject and body
+#[async_trait::async_trait]
+pub trait EmailData {
+    /// Get the email contents
+    async fn get_email_data(&self) -> CustomResult<EmailContents, EmailError>;
+}
+
+dyn_clone::clone_trait_object!(EmailClient);
+
+/// List of available email clients to choose from
+#[derive(Debug, Clone, Default, Deserialize)]
+pub enum AvailableEmailClients {
+    #[default]
+    /// AWS SES email client
+    SES,
+}
 
 /// Struct that contains the settings required to construct an EmailClient.
 #[derive(Debug, Clone, Default, Deserialize)]
 pub struct EmailSettings {
-    /// Sender email.
-    pub from_email: String,
-
     /// The AWS region to send SES requests to.
pub aws_region: String, /// Base-url used when adding links that should redirect to self pub base_url: String, -} -/// Client for AWS SES operation -#[derive(Debug, Clone)] -pub struct AwsSes { - ses_client: Client, - from_email: String, -} + /// Number of days for verification of the email + pub allowed_unverified_days: i64, -impl AwsSes { - /// Constructs a new AwsSes client - pub async fn new(conf: &EmailSettings) -> Self { - let region_provider = RegionProviderChain::first_try(Region::new(conf.aws_region.clone())); - let sdk_config = aws_config::from_env().region(region_provider).load().await; + /// Sender email + pub sender_email: String, - Self { - ses_client: Client::new(&sdk_config), - from_email: conf.from_email.clone(), - } - } -} + /// Configs related to AWS Simple Email Service + pub aws_ses: Option, -#[async_trait::async_trait] -impl EmailClient for AwsSes { - async fn send_email( - &self, - recipient: pii::Email, - subject: String, - body: String, - ) -> EmailResult<()> { - self.ses_client - .send_email() - .from_email_address(self.from_email.to_owned()) - .destination( - Destination::builder() - .to_addresses(recipient.peek()) - .build(), - ) - .content( - EmailContent::builder() - .simple( - Message::builder() - .subject(Content::builder().data(subject).build()) - .body( - Body::builder() - .text(Content::builder().data(body).charset("UTF-8").build()) - .build(), - ) - .build(), - ) - .build(), - ) - .send() - .await - .map_err(AwsSesError::SendingFailure) - .into_report() - .change_context(EmailError::EmailSendingFailure)?; - - Ok(()) - } + /// The active email client to use + pub active_email_client: AvailableEmailClients, } -#[allow(missing_docs)] /// Errors that could occur from EmailClient. #[derive(Debug, thiserror::Error)] pub enum EmailError { /// An error occurred when building email client. #[error("Error building email client")] ClientBuildingFailure, + /// An error occurred when sending email #[error("Error sending email to recipient")] EmailSendingFailure, + + /// Failed to generate the email token #[error("Failed to generate email token")] TokenGenerationFailure, + + /// The expected feature is not implemented #[error("Feature not implemented")] NotImplemented, } - -/// Errors that could occur during SES operations. -#[derive(Debug, thiserror::Error)] -pub enum AwsSesError { - /// An error occurred in the SDK while sending email. 
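To see how the trait split above fits together: an `EmailData` implementor produces `EmailContents`, and the blanket `EmailService` impl then gives every `EmailClient` a `compose_and_send_email`. Here is a sketch building on the types introduced above; the `VerifyEmail` type and its inline HTML are illustrative only, not part of this diff:

```rust
use common_utils::{errors::CustomResult, pii};

/// Hypothetical email kind, used only for illustration.
struct VerifyEmail {
    recipient: pii::Email,
    link: String,
}

#[async_trait::async_trait]
impl EmailData for VerifyEmail {
    async fn get_email_data(&self) -> CustomResult<EmailContents, EmailError> {
        // Build the body as an IntermediateString; the concrete client later
        // converts it to its own rich text type via `convert_to_rich_text`.
        let body = IntermediateString::new(format!(
            "<p>Click <a href=\"{}\">here</a> to verify your email</p>",
            self.link
        ));
        Ok(EmailContents {
            subject: "Verify your email".to_string(),
            body,
            recipient: self.recipient.clone(),
        })
    }
}

// Usage: any EmailClient gets `compose_and_send_email` for free through the
// blanket EmailService impl shown above:
// client.compose_and_send_email(Box::new(verify_email), proxy_url).await?;
```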
- #[error("Failed to Send Email {0:?}")] - SendingFailure(aws_smithy_client::SdkError), -} diff --git a/crates/external_services/src/email/ses.rs b/crates/external_services/src/email/ses.rs new file mode 100644 index 000000000000..7e521a5bc1c4 --- /dev/null +++ b/crates/external_services/src/email/ses.rs @@ -0,0 +1,257 @@ +use std::time::{Duration, SystemTime}; + +use aws_sdk_sesv2::{ + config::Region, + operation::send_email::SendEmailError, + types::{Body, Content, Destination, EmailContent, Message}, + Client, +}; +use aws_sdk_sts::config::Credentials; +use common_utils::{errors::CustomResult, ext_traits::OptionExt, pii}; +use error_stack::{report, IntoReport, ResultExt}; +use hyper::Uri; +use masking::PeekInterface; +use router_env::logger; +use tokio::sync::OnceCell; + +use crate::email::{EmailClient, EmailError, EmailResult, EmailSettings, IntermediateString}; + +/// Client for AWS SES operation +#[derive(Debug, Clone)] +pub struct AwsSes { + ses_client: OnceCell, + sender: String, + settings: EmailSettings, +} + +/// Struct that contains the AWS ses specific configs required to construct an SES email client +#[derive(Debug, Clone, Default, serde::Deserialize)] +pub struct SESConfig { + /// The arn of email role + pub email_role_arn: String, + + /// The name of sts_session role + pub sts_role_session_name: String, +} + +/// Errors that could occur during SES operations. +#[derive(Debug, thiserror::Error)] +pub enum AwsSesError { + /// An error occurred in the SDK while sending email. + #[error("Failed to Send Email {0:?}")] + SendingFailure(aws_smithy_client::SdkError), + + /// Configuration variable is missing to construct the email client + #[error("Missing configuration variable {0}")] + MissingConfigurationVariable(&'static str), + + /// Failed to assume the given STS role + #[error("Failed to STS assume role: Role ARN: {role_arn}, Session name: {session_name}, Region: {region}")] + AssumeRoleFailure { + /// Aws region + region: String, + + /// arn of email role + role_arn: String, + + /// The name of sts_session role + session_name: String, + }, + + /// Temporary credentials are missing + #[error("Assumed role does not contain credentials for role user: {0:?}")] + TemporaryCredentialsMissing(String), + + /// The proxy Connector cannot be built + #[error("The proxy build cannot be built")] + BuildingProxyConnectorFailed, +} + +impl AwsSes { + /// Constructs a new AwsSes client + pub async fn create(conf: &EmailSettings, proxy_url: Option>) -> Self { + Self { + ses_client: OnceCell::new_with( + Self::create_client(conf, proxy_url) + .await + .map_err(|error| logger::error!(?error, "Failed to initialize SES Client")) + .ok(), + ), + sender: conf.sender_email.clone(), + settings: conf.clone(), + } + } + + /// A helper function to create ses client + pub async fn create_client( + conf: &EmailSettings, + proxy_url: Option>, + ) -> CustomResult { + let sts_config = Self::get_shared_config(conf.aws_region.to_owned(), proxy_url.as_ref())? 
+ .load() + .await; + + let ses_config = conf + .aws_ses + .as_ref() + .get_required_value("aws ses configuration") + .attach_printable("The selected email client is aws ses, but configuration is missing") + .change_context(AwsSesError::MissingConfigurationVariable("aws_ses"))?; + + let role = aws_sdk_sts::Client::new(&sts_config) + .assume_role() + .role_arn(&ses_config.email_role_arn) + .role_session_name(&ses_config.sts_role_session_name) + .send() + .await + .into_report() + .change_context(AwsSesError::AssumeRoleFailure { + region: conf.aws_region.to_owned(), + role_arn: ses_config.email_role_arn.to_owned(), + session_name: ses_config.sts_role_session_name.to_owned(), + })?; + + let creds = role.credentials().ok_or( + report!(AwsSesError::TemporaryCredentialsMissing(format!( + "{role:?}" + ))) + .attach_printable("Credentials object not available"), + )?; + + let credentials = Credentials::new( + creds + .access_key_id() + .ok_or( + report!(AwsSesError::TemporaryCredentialsMissing(format!( + "{role:?}" + ))) + .attach_printable("Access Key ID not found"), + )? + .to_owned(), + creds + .secret_access_key() + .ok_or( + report!(AwsSesError::TemporaryCredentialsMissing(format!( + "{role:?}" + ))) + .attach_printable("Secret Access Key not found"), + )? + .to_owned(), + creds.session_token().map(|s| s.to_owned()), + creds.expiration().and_then(|dt| { + SystemTime::UNIX_EPOCH + .checked_add(Duration::from_nanos(u64::try_from(dt.as_nanos()).ok()?)) + }), + "custom_provider", + ); + + logger::debug!( + "Obtained SES temporary credentials with expiry {:?}", + credentials.expiry() + ); + + let ses_config = Self::get_shared_config(conf.aws_region.to_owned(), proxy_url)? + .credentials_provider(credentials) + .load() + .await; + + Ok(Client::new(&ses_config)) + } + + fn get_shared_config( + region: String, + proxy_url: Option>, + ) -> CustomResult { + let region_provider = Region::new(region); + let mut config = aws_config::from_env().region(region_provider); + if let Some(proxy_url) = proxy_url { + let proxy_connector = Self::get_proxy_connector(proxy_url)?; + let provider_config = aws_config::provider_config::ProviderConfig::default() + .with_tcp_connector(proxy_connector.clone()); + let http_connector = + aws_smithy_client::hyper_ext::Adapter::builder().build(proxy_connector); + config = config + .configure(provider_config) + .http_connector(http_connector); + }; + Ok(config) + } + + fn get_proxy_connector( + proxy_url: impl AsRef, + ) -> CustomResult, AwsSesError> { + let proxy_uri = proxy_url + .as_ref() + .parse::() + .into_report() + .attach_printable("Unable to parse the proxy url {proxy_url}") + .change_context(AwsSesError::BuildingProxyConnectorFailed)?; + + let proxy = hyper_proxy::Proxy::new(hyper_proxy::Intercept::All, proxy_uri); + + hyper_proxy::ProxyConnector::from_proxy(hyper::client::HttpConnector::new(), proxy) + .into_report() + .change_context(AwsSesError::BuildingProxyConnectorFailed) + } +} + +#[async_trait::async_trait] +impl EmailClient for AwsSes { + type RichText = Body; + + fn convert_to_rich_text( + &self, + intermediate_string: IntermediateString, + ) -> CustomResult { + let email_body = Body::builder() + .html( + Content::builder() + .data(intermediate_string.into_inner()) + .charset("UTF-8") + .build(), + ) + .build(); + + Ok(email_body) + } + + async fn send_email( + &self, + recipient: pii::Email, + subject: String, + body: Self::RichText, + proxy_url: Option<&String>, + ) -> EmailResult<()> { + self.ses_client + .get_or_try_init(|| async { + 
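The `ses_client` here is a `tokio::sync::OnceCell`: if the client could not be built at startup, `get_or_try_init` retries the build exactly once at send time and caches the result, so subsequent sends reuse the same client. The same lazy, fallible initialization pattern in isolation (all names hypothetical):

```rust
use tokio::sync::OnceCell;

struct Service {
    client: OnceCell<String>, // stands in for the SES `Client`
}

impl Service {
    /// Build the inner client on first use and cache it; later calls reuse it.
    async fn client(&self) -> Result<&String, &'static str> {
        self.client
            .get_or_try_init(|| async {
                // Fallible construction: an Err leaves the cell empty, so the
                // next call can retry, mirroring the SES client above.
                Ok::<_, &'static str>("connected-client".to_string())
            })
            .await
    }
}

#[tokio::main]
async fn main() {
    let svc = Service { client: OnceCell::new() };
    assert_eq!(svc.client().await.unwrap(), "connected-client");
}
```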
Self::create_client(&self.settings, proxy_url) + .await + .change_context(EmailError::ClientBuildingFailure) + }) + .await? + .send_email() + .from_email_address(self.sender.to_owned()) + .destination( + Destination::builder() + .to_addresses(recipient.peek()) + .build(), + ) + .content( + EmailContent::builder() + .simple( + Message::builder() + .subject(Content::builder().data(subject).build()) + .body(body) + .build(), + ) + .build(), + ) + .send() + .await + .map_err(AwsSesError::SendingFailure) + .into_report() + .change_context(EmailError::EmailSendingFailure)?; + + Ok(()) + } +} diff --git a/crates/router/Cargo.toml b/crates/router/Cargo.toml index f0316d69249e..f508460574dd 100644 --- a/crates/router/Cargo.toml +++ b/crates/router/Cargo.toml @@ -12,11 +12,11 @@ license.workspace = true default = ["kv_store", "stripe", "oltp", "olap", "backwards_compatibility", "accounts_cache", "dummy_connector", "payouts", "profile_specific_fallback_routing", "retry"] s3 = ["dep:aws-sdk-s3", "dep:aws-config"] kms = ["external_services/kms", "dep:aws-config"] -email = ["external_services/email", "dep:aws-config"] +email = ["external_services/email", "dep:aws-config", "olap"] basilisk = ["kms"] stripe = ["dep:serde_qs"] release = ["kms", "stripe", "basilisk", "s3", "email", "business_profile_routing", "accounts_cache", "kv_store", "profile_specific_fallback_routing"] -olap = ["data_models/olap", "storage_impl/olap", "scheduler/olap"] +olap = ["data_models/olap", "storage_impl/olap", "scheduler/olap", "dep:analytics"] oltp = ["storage_impl/oltp"] kv_store = ["scheduler/kv_store"] accounts_cache = [] @@ -68,7 +68,7 @@ mime = "0.3.17" nanoid = "0.4.0" num_cpus = "1.15.0" once_cell = "1.18.0" -openssl = "0.10.55" +openssl = "0.10.60" qrcode = "0.12.0" rand = "0.8.5" rand_chacha = "0.3.1" @@ -102,6 +102,7 @@ tracing-futures = { version = "0.2.5", features = ["tokio"] } # First party crates api_models = { version = "0.1.0", path = "../api_models", features = ["errors"] } +analytics = { version = "0.1.0", path = "../analytics", optional = true } cards = { version = "0.1.0", path = "../cards" } common_enums = { version = "0.1.0", path = "../common_enums" } common_utils = { version = "0.1.0", path = "../common_utils", features = ["signals", "async_ext", "logs"] } @@ -118,6 +119,7 @@ router_env = { version = "0.1.0", path = "../router_env", features = ["log_extra scheduler = { version = "0.1.0", path = "../scheduler", default-features = false } storage_impl = { version = "0.1.0", path = "../storage_impl", default-features = false } erased-serde = "0.3.31" +rdkafka = "0.36.0" [build-dependencies] router_env = { version = "0.1.0", path = "../router_env", default-features = false } diff --git a/crates/router/src/analytics.rs b/crates/router/src/analytics.rs index d57403d92989..f31e908e0dc3 100644 --- a/crates/router/src/analytics.rs +++ b/crates/router/src/analytics.rs @@ -1,129 +1,560 @@ -mod core; -mod errors; -pub mod metrics; -mod payments; -mod query; -mod refunds; -pub mod routes; - -mod sqlx; -mod types; -mod utils; - -use api_models::analytics::{ - payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier}, - refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier}, - Granularity, TimeRange, -}; -use router_env::{instrument, tracing}; - -use self::{ - payments::metrics::{PaymentMetric, PaymentMetricRow}, - refunds::metrics::{RefundMetric, RefundMetricRow}, - sqlx::SqlxClient, -}; -use crate::configs::settings::Database; - -#[derive(Clone, 
Debug)] -pub enum AnalyticsProvider { - Sqlx(SqlxClient), -} +pub use analytics::*; + +pub mod routes { + use actix_web::{web, Responder, Scope}; + use analytics::{ + api_event::api_events_core, errors::AnalyticsError, lambda_utils::invoke_lambda, + sdk_events::sdk_events_core, + }; + use api_models::analytics::{ + GenerateReportRequest, GetApiEventFiltersRequest, GetApiEventMetricRequest, + GetPaymentFiltersRequest, GetPaymentMetricRequest, GetRefundFilterRequest, + GetRefundMetricRequest, GetSdkEventFiltersRequest, GetSdkEventMetricRequest, ReportRequest, + }; + use error_stack::ResultExt; + use router_env::AnalyticsFlow; + + use crate::{ + core::api_locking, + db::user::UserInterface, + routes::AppState, + services::{ + api, + authentication::{self as auth, AuthToken, AuthenticationData}, + authorization::permissions::Permission, + ApplicationResponse, + }, + types::domain::UserEmail, + }; + + pub struct Analytics; + + impl Analytics { + pub fn server(state: AppState) -> Scope { + let mut route = web::scope("/analytics/v1").app_data(web::Data::new(state)); + { + route = route + .service( + web::resource("metrics/payments") + .route(web::post().to(get_payment_metrics)), + ) + .service( + web::resource("metrics/refunds").route(web::post().to(get_refunds_metrics)), + ) + .service( + web::resource("filters/payments") + .route(web::post().to(get_payment_filters)), + ) + .service( + web::resource("filters/refunds").route(web::post().to(get_refund_filters)), + ) + .service(web::resource("{domain}/info").route(web::get().to(get_info))) + .service( + web::resource("report/dispute") + .route(web::post().to(generate_dispute_report)), + ) + .service( + web::resource("report/refunds") + .route(web::post().to(generate_refund_report)), + ) + .service( + web::resource("report/payments") + .route(web::post().to(generate_payment_report)), + ) + .service( + web::resource("metrics/sdk_events") + .route(web::post().to(get_sdk_event_metrics)), + ) + .service( + web::resource("filters/sdk_events") + .route(web::post().to(get_sdk_event_filters)), + ) + .service(web::resource("api_event_logs").route(web::get().to(get_api_events))) + .service(web::resource("sdk_event_logs").route(web::post().to(get_sdk_events))) + .service( + web::resource("filters/api_events") + .route(web::post().to(get_api_event_filters)), + ) + .service( + web::resource("metrics/api_events") + .route(web::post().to(get_api_events_metrics)), + ) + } + route + } + } -impl Default for AnalyticsProvider { - fn default() -> Self { - Self::Sqlx(SqlxClient::default()) + pub async fn get_info( + state: web::Data, + req: actix_web::HttpRequest, + domain: actix_web::web::Path, + ) -> impl Responder { + let flow = AnalyticsFlow::GetInfo; + Box::pin(api::server_wrap( + flow, + state, + &req, + domain.into_inner(), + |_, _, domain| async { + analytics::core::get_domain_info(domain) + .await + .map(ApplicationResponse::Json) + }, + &auth::NoAuth, + api_locking::LockAction::NotApplicable, + )) + .await } -} -impl AnalyticsProvider { - #[instrument(skip_all)] + /// # Panics + /// + /// Panics if `json_payload` array does not contain one `GetPaymentMetricRequest` element. 
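The metric handlers below accept `web::Json<[T; 1]>`, so deserialization itself guarantees exactly one element and the subsequent `expect` cannot fire. A standalone sketch of why the array type makes the extraction safe:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct MetricRequest {
    metric: String,
}

fn main() {
    // Deserializing into `[MetricRequest; 1]` rejects anything but a
    // one-element array, so taking the single element cannot fail.
    let payload: [MetricRequest; 1] =
        serde_json::from_str(r#"[{"metric": "payment_success_rate"}]"#).unwrap();
    let [request] = payload; // irrefutable: the type pins the length
    println!("{request:?}");

    // A two-element array fails at deserialization time instead of
    // panicking later in the handler.
    assert!(serde_json::from_str::<[MetricRequest; 1]>(
        r#"[{"metric": "a"}, {"metric": "b"}]"#
    )
    .is_err());
}
```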
pub async fn get_payment_metrics( - &self, - metric: &PaymentMetrics, - dimensions: &[PaymentDimensions], - merchant_id: &str, - filters: &PaymentFilters, - granularity: &Option, - time_range: &TimeRange, - ) -> types::MetricsResult> { - // Metrics to get the fetch time for each payment metric - metrics::request::record_operation_time( - async { - match self { - Self::Sqlx(pool) => { - metric - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) - .await - } - } + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetPaymentMetricRequest; 1]>, + ) -> impl Responder { + // safety: This shouldn't panic owing to the data type + #[allow(clippy::expect_used)] + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetPaymentMetricRequest"); + let flow = AnalyticsFlow::GetPaymentMetrics; + Box::pin(api::server_wrap( + flow, + state, + &req, + payload, + |state, auth: AuthenticationData, req| async move { + analytics::payments::get_metrics( + &state.pool, + &auth.merchant_account.merchant_id, + req, + ) + .await + .map(ApplicationResponse::Json) }, - &metrics::METRIC_FETCH_TIME, - metric, - self, - ) + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) .await } - pub async fn get_refund_metrics( - &self, - metric: &RefundMetrics, - dimensions: &[RefundDimensions], - merchant_id: &str, - filters: &RefundFilters, - granularity: &Option, - time_range: &TimeRange, - ) -> types::MetricsResult> { - match self { - Self::Sqlx(pool) => { - metric - .load_metrics( - dimensions, - merchant_id, - filters, - granularity, - time_range, - pool, - ) + /// # Panics + /// + /// Panics if `json_payload` array does not contain one `GetRefundMetricRequest` element. + pub async fn get_refunds_metrics( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetRefundMetricRequest; 1]>, + ) -> impl Responder { + #[allow(clippy::expect_used)] + // safety: This shouldn't panic owing to the data type + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetRefundMetricRequest"); + let flow = AnalyticsFlow::GetRefundsMetrics; + Box::pin(api::server_wrap( + flow, + state, + &req, + payload, + |state, auth: AuthenticationData, req| async move { + analytics::refunds::get_metrics( + &state.pool, + &auth.merchant_account.merchant_id, + req, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + /// # Panics + /// + /// Panics if `json_payload` array does not contain one `GetSdkEventMetricRequest` element. 
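Each analytics route runs its closure behind `auth::JWTAuth(Permission::Analytics)`, so only a JWT carrying the analytics permission reaches the handler. A simplified stand-in for that gating; `Permission::Analytics` is from the diff, everything else is illustrative:

```rust
#[derive(Debug, PartialEq)]
enum Permission {
    Analytics,
}

struct JwtClaims {
    permissions: Vec<Permission>,
}

/// Run `handler` only if the token grants the required permission,
/// mirroring how `server_wrap` plus `JWTAuth` gate each analytics route.
fn with_permission<T>(
    claims: &JwtClaims,
    required: Permission,
    handler: impl FnOnce() -> T,
) -> Result<T, &'static str> {
    if claims.permissions.contains(&required) {
        Ok(handler())
    } else {
        Err("missing permission")
    }
}

fn main() {
    let claims = JwtClaims { permissions: vec![Permission::Analytics] };
    let res = with_permission(&claims, Permission::Analytics, || "metrics payload");
    assert_eq!(res, Ok("metrics payload"));
}
```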
+ pub async fn get_sdk_event_metrics( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetSdkEventMetricRequest; 1]>, + ) -> impl Responder { + // safety: This shouldn't panic owing to the data type + #[allow(clippy::expect_used)] + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetSdkEventMetricRequest"); + let flow = AnalyticsFlow::GetSdkMetrics; + Box::pin(api::server_wrap( + flow, + state, + &req, + payload, + |state, auth: AuthenticationData, req| async move { + analytics::sdk_events::get_metrics( + &state.pool, + auth.merchant_account.publishable_key.as_ref(), + req, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn get_payment_filters( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let flow = AnalyticsFlow::GetPaymentFilters; + Box::pin(api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req| async move { + analytics::payments::get_filters( + &state.pool, + req, + &auth.merchant_account.merchant_id, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn get_refund_filters( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let flow = AnalyticsFlow::GetRefundFilters; + Box::pin(api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req: GetRefundFilterRequest| async move { + analytics::refunds::get_filters( + &state.pool, + req, + &auth.merchant_account.merchant_id, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn get_sdk_event_filters( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let flow = AnalyticsFlow::GetSdkEventFilters; + Box::pin(api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req| async move { + analytics::sdk_events::get_filters( + &state.pool, + req, + auth.merchant_account.publishable_key.as_ref(), + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn get_api_events( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Query, + ) -> impl Responder { + let flow = AnalyticsFlow::GetApiEvents; + Box::pin(api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req| async move { + api_events_core(&state.pool, req, auth.merchant_account.merchant_id) .await - } - } + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await } - pub async fn from_conf( - config: &AnalyticsConfig, - #[cfg(feature = "kms")] kms_client: &external_services::kms::KmsClient, - ) -> Self { - match config { - AnalyticsConfig::Sqlx { sqlx } => Self::Sqlx( - SqlxClient::from_conf( - sqlx, - #[cfg(feature = "kms")] - kms_client, + pub async fn get_sdk_events( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let flow = AnalyticsFlow::GetSdkEvents; 
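All of these handlers are mounted by the `Analytics::server` scope near the top of this module, one `web::resource` per endpoint. A self-contained miniature of that registration pattern (the real scope also attaches application state via `app_data`, omitted here):

```rust
use actix_web::{web, App, HttpResponse, HttpServer, Responder};

async fn get_metrics() -> impl Responder {
    HttpResponse::Ok().body("metrics")
}

async fn get_filters() -> impl Responder {
    HttpResponse::Ok().body("filters")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            // Mirrors the `/analytics/v1` scope: one resource per endpoint,
            // each with a single POST route.
            web::scope("/analytics/v1")
                .service(web::resource("metrics/payments").route(web::post().to(get_metrics)))
                .service(web::resource("filters/payments").route(web::post().to(get_filters))),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```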
+ Box::pin(api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req| async move { + sdk_events_core( + &state.pool, + req, + auth.merchant_account.publishable_key.unwrap_or_default(), ) - .await, - ), - } + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await } -} -#[derive(Clone, Debug, serde::Deserialize)] -#[serde(tag = "source")] -#[serde(rename_all = "lowercase")] -pub enum AnalyticsConfig { - Sqlx { sqlx: Database }, -} + pub async fn generate_refund_report( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let state_ref = &state; + let req_headers = &req.headers(); -impl Default for AnalyticsConfig { - fn default() -> Self { - Self::Sqlx { - sqlx: Database::default(), - } + let flow = AnalyticsFlow::GenerateRefundReport; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, payload| async move { + let jwt_payload = + auth::parse_jwt_payload::(req_headers, state_ref).await; + + let user_id = jwt_payload + .change_context(AnalyticsError::UnknownError)? + .user_id; + + let user = UserInterface::find_user_by_id(&*state.store, &user_id) + .await + .change_context(AnalyticsError::UnknownError)?; + + let user_email = UserEmail::from_pii_email(user.email) + .change_context(AnalyticsError::UnknownError)? + .get_secret(); + + let lambda_req = GenerateReportRequest { + request: payload, + merchant_id: auth.merchant_account.merchant_id.to_string(), + email: user_email, + }; + + let json_bytes = + serde_json::to_vec(&lambda_req).map_err(|_| AnalyticsError::UnknownError)?; + invoke_lambda( + &state.conf.report_download_config.refund_function, + &state.conf.report_download_config.region, + &json_bytes, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn generate_dispute_report( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let state_ref = &state; + let req_headers = &req.headers(); + + let flow = AnalyticsFlow::GenerateDisputeReport; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, payload| async move { + let jwt_payload = + auth::parse_jwt_payload::(req_headers, state_ref).await; + + let user_id = jwt_payload + .change_context(AnalyticsError::UnknownError)? + .user_id; + + let user = UserInterface::find_user_by_id(&*state.store, &user_id) + .await + .change_context(AnalyticsError::UnknownError)?; + + let user_email = UserEmail::from_pii_email(user.email) + .change_context(AnalyticsError::UnknownError)? 
+ .get_secret(); + + let lambda_req = GenerateReportRequest { + request: payload, + merchant_id: auth.merchant_account.merchant_id.to_string(), + email: user_email, + }; + + let json_bytes = + serde_json::to_vec(&lambda_req).map_err(|_| AnalyticsError::UnknownError)?; + invoke_lambda( + &state.conf.report_download_config.dispute_function, + &state.conf.report_download_config.region, + &json_bytes, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn generate_payment_report( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let state_ref = &state; + let req_headers = &req.headers(); + + let flow = AnalyticsFlow::GeneratePaymentReport; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, payload| async move { + let jwt_payload = + auth::parse_jwt_payload::(req_headers, state_ref).await; + + let user_id = jwt_payload + .change_context(AnalyticsError::UnknownError)? + .user_id; + + let user = UserInterface::find_user_by_id(&*state.store, &user_id) + .await + .change_context(AnalyticsError::UnknownError)?; + + let user_email = UserEmail::from_pii_email(user.email) + .change_context(AnalyticsError::UnknownError)? + .get_secret(); + + let lambda_req = GenerateReportRequest { + request: payload, + merchant_id: auth.merchant_account.merchant_id.to_string(), + email: user_email, + }; + + let json_bytes = + serde_json::to_vec(&lambda_req).map_err(|_| AnalyticsError::UnknownError)?; + invoke_lambda( + &state.conf.report_download_config.payment_function, + &state.conf.report_download_config.region, + &json_bytes, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + /// # Panics + /// + /// Panics if `json_payload` array does not contain one `GetApiEventMetricRequest` element. 
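The three `generate_*_report` handlers share one recipe: resolve the requesting user's email from the JWT, wrap the payload with the merchant id and email in a `GenerateReportRequest`, serialize it, and hand the bytes to a region-scoped Lambda via `invoke_lambda`. A condensed, dependency-free sketch of that recipe; every name here is an illustrative stand-in, not the crate's API:

```rust
use serde::Serialize;

#[derive(Serialize)]
struct GenerateReportRequest {
    request: String, // the original report payload, simplified to a string
    merchant_id: String,
    email: String,
}

/// Stand-in for the analytics crate's `invoke_lambda(function, region, bytes)`.
fn invoke_lambda(function: &str, region: &str, payload: &[u8]) -> Result<(), String> {
    println!("invoking {function} in {region} with {} bytes", payload.len());
    Ok(())
}

fn generate_report(
    payload: String,
    merchant_id: String,
    user_email: String,
) -> Result<(), String> {
    let lambda_req = GenerateReportRequest {
        request: payload,
        merchant_id,
        email: user_email,
    };
    // Serialize once, then invoke the configured report function.
    let json_bytes = serde_json::to_vec(&lambda_req).map_err(|e| e.to_string())?;
    // Placeholder function name and region; the real values come from
    // `state.conf.report_download_config`.
    invoke_lambda("report_refunds", "us-east-1", &json_bytes)
}

fn main() {
    generate_report(
        "refund report payload".into(),
        "merchant_123".into(),
        "ops@example.com".into(),
    )
    .unwrap();
}
```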
+ pub async fn get_api_events_metrics( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetApiEventMetricRequest; 1]>, + ) -> impl Responder { + // safety: This shouldn't panic owing to the data type + #[allow(clippy::expect_used)] + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetApiEventMetricRequest"); + let flow = AnalyticsFlow::GetApiEventMetrics; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + payload, + |state, auth: AuthenticationData, req| async move { + analytics::api_event::get_api_event_metrics( + &state.pool, + &auth.merchant_account.merchant_id, + req, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + + pub async fn get_api_event_filters( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json, + ) -> impl Responder { + let flow = AnalyticsFlow::GetApiEventFilters; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + json_payload.into_inner(), + |state, auth: AuthenticationData, req| async move { + analytics::api_event::get_filters( + &state.pool, + req, + auth.merchant_account.merchant_id, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await } } diff --git a/crates/router/src/analytics/core.rs b/crates/router/src/analytics/core.rs deleted file mode 100644 index bf124a6c0e85..000000000000 --- a/crates/router/src/analytics/core.rs +++ /dev/null @@ -1,96 +0,0 @@ -use api_models::analytics::{ - payments::PaymentDimensions, refunds::RefundDimensions, FilterValue, GetInfoResponse, - GetPaymentFiltersRequest, GetRefundFilterRequest, PaymentFiltersResponse, RefundFilterValue, - RefundFiltersResponse, -}; -use error_stack::ResultExt; - -use super::{ - errors::{self, AnalyticsError}, - payments::filters::{get_payment_filter_for_dimension, FilterRow}, - refunds::filters::{get_refund_filter_for_dimension, RefundFilterRow}, - types::AnalyticsDomain, - utils, AnalyticsProvider, -}; -use crate::{services::ApplicationResponse, types::domain}; - -pub type AnalyticsApiResponse = errors::AnalyticsResult>; - -pub async fn get_domain_info(domain: AnalyticsDomain) -> AnalyticsApiResponse { - let info = match domain { - AnalyticsDomain::Payments => GetInfoResponse { - metrics: utils::get_payment_metrics_info(), - download_dimensions: None, - dimensions: utils::get_payment_dimensions(), - }, - AnalyticsDomain::Refunds => GetInfoResponse { - metrics: utils::get_refund_metrics_info(), - download_dimensions: None, - dimensions: utils::get_refund_dimensions(), - }, - }; - Ok(ApplicationResponse::Json(info)) -} - -pub async fn payment_filters_core( - pool: AnalyticsProvider, - req: GetPaymentFiltersRequest, - merchant: domain::MerchantAccount, -) -> AnalyticsApiResponse { - let mut res = PaymentFiltersResponse::default(); - - for dim in req.group_by_names { - let values = match pool.clone() { - AnalyticsProvider::Sqlx(pool) => { - get_payment_filter_for_dimension(dim, &merchant.merchant_id, &req.time_range, &pool) - .await - } - } - .change_context(AnalyticsError::UnknownError)? 
- .into_iter() - .filter_map(|fil: FilterRow| match dim { - PaymentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), - PaymentDimensions::PaymentStatus => fil.status.map(|i| i.as_ref().to_string()), - PaymentDimensions::Connector => fil.connector, - PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()), - PaymentDimensions::PaymentMethod => fil.payment_method, - }) - .collect::>(); - res.query_data.push(FilterValue { - dimension: dim, - values, - }) - } - - Ok(ApplicationResponse::Json(res)) -} - -pub async fn refund_filter_core( - pool: AnalyticsProvider, - req: GetRefundFilterRequest, - merchant: domain::MerchantAccount, -) -> AnalyticsApiResponse { - let mut res = RefundFiltersResponse::default(); - for dim in req.group_by_names { - let values = match pool.clone() { - AnalyticsProvider::Sqlx(pool) => { - get_refund_filter_for_dimension(dim, &merchant.merchant_id, &req.time_range, &pool) - .await - } - } - .change_context(AnalyticsError::UnknownError)? - .into_iter() - .filter_map(|fil: RefundFilterRow| match dim { - RefundDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()), - RefundDimensions::RefundStatus => fil.refund_status.map(|i| i.as_ref().to_string()), - RefundDimensions::Connector => fil.connector, - RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()), - }) - .collect::>(); - res.query_data.push(RefundFilterValue { - dimension: dim, - values, - }) - } - Ok(ApplicationResponse::Json(res)) -} diff --git a/crates/router/src/analytics/payments.rs b/crates/router/src/analytics/payments.rs deleted file mode 100644 index 527bf75a3c72..000000000000 --- a/crates/router/src/analytics/payments.rs +++ /dev/null @@ -1,13 +0,0 @@ -pub mod accumulator; -mod core; -pub mod filters; -pub mod metrics; -pub mod types; -pub use accumulator::{PaymentMetricAccumulator, PaymentMetricsAccumulator}; - -pub trait PaymentAnalytics: - metrics::PaymentMetricAnalytics + filters::PaymentFilterAnalytics -{ -} - -pub use self::core::get_metrics; diff --git a/crates/router/src/analytics/payments/core.rs b/crates/router/src/analytics/payments/core.rs deleted file mode 100644 index 23eca8879a70..000000000000 --- a/crates/router/src/analytics/payments/core.rs +++ /dev/null @@ -1,129 +0,0 @@ -use std::collections::HashMap; - -use api_models::analytics::{ - payments::{MetricsBucketResponse, PaymentMetrics, PaymentMetricsBucketIdentifier}, - AnalyticsMetadata, GetPaymentMetricRequest, MetricsResponse, -}; -use error_stack::{IntoReport, ResultExt}; -use router_env::{ - instrument, logger, - tracing::{self, Instrument}, -}; - -use super::PaymentMetricsAccumulator; -use crate::{ - analytics::{ - core::AnalyticsApiResponse, errors::AnalyticsError, metrics, - payments::PaymentMetricAccumulator, AnalyticsProvider, - }, - services::ApplicationResponse, - types::domain, -}; - -#[instrument(skip_all)] -pub async fn get_metrics( - pool: AnalyticsProvider, - merchant_account: domain::MerchantAccount, - req: GetPaymentMetricRequest, -) -> AnalyticsApiResponse> { - let mut metrics_accumulator: HashMap< - PaymentMetricsBucketIdentifier, - PaymentMetricsAccumulator, - > = HashMap::new(); - - let mut set = tokio::task::JoinSet::new(); - for metric_type in req.metrics.iter().cloned() { - let req = req.clone(); - let merchant_id = merchant_account.merchant_id.clone(); - let pool = pool.clone(); - let task_span = tracing::debug_span!( - "analytics_payments_query", - payment_metric = metric_type.as_ref() - ); - set.spawn( - async move { - 
let data = pool - .get_payment_metrics( - &metric_type, - &req.group_by_names.clone(), - &merchant_id, - &req.filters, - &req.time_series.map(|t| t.granularity), - &req.time_range, - ) - .await - .change_context(AnalyticsError::UnknownError); - (metric_type, data) - } - .instrument(task_span), - ); - } - - while let Some((metric, data)) = set - .join_next() - .await - .transpose() - .into_report() - .change_context(AnalyticsError::UnknownError)? - { - let data = data?; - let attributes = &[ - metrics::request::add_attributes("metric_type", metric.to_string()), - metrics::request::add_attributes( - "source", - match pool { - crate::analytics::AnalyticsProvider::Sqlx(_) => "Sqlx", - }, - ), - ]; - - let value = u64::try_from(data.len()); - if let Ok(val) = value { - metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes); - logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val); - } - - for (id, value) in data { - logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); - let metrics_builder = metrics_accumulator.entry(id).or_default(); - match metric { - PaymentMetrics::PaymentSuccessRate => metrics_builder - .payment_success_rate - .add_metrics_bucket(&value), - PaymentMetrics::PaymentCount => { - metrics_builder.payment_count.add_metrics_bucket(&value) - } - PaymentMetrics::PaymentSuccessCount => { - metrics_builder.payment_success.add_metrics_bucket(&value) - } - PaymentMetrics::PaymentProcessedAmount => { - metrics_builder.processed_amount.add_metrics_bucket(&value) - } - PaymentMetrics::AvgTicketSize => { - metrics_builder.avg_ticket_size.add_metrics_bucket(&value) - } - } - } - - logger::debug!( - "Analytics Accumulated Results: metric: {}, results: {:#?}", - metric, - metrics_accumulator - ); - } - - let query_data: Vec = metrics_accumulator - .into_iter() - .map(|(id, val)| MetricsBucketResponse { - values: val.collect(), - dimensions: id, - }) - .collect(); - - Ok(ApplicationResponse::Json(MetricsResponse { - query_data, - meta_data: [AnalyticsMetadata { - current_time_range: req.time_range, - }], - })) -} diff --git a/crates/router/src/analytics/refunds/core.rs b/crates/router/src/analytics/refunds/core.rs deleted file mode 100644 index 4c2d2c394181..000000000000 --- a/crates/router/src/analytics/refunds/core.rs +++ /dev/null @@ -1,104 +0,0 @@ -use std::collections::HashMap; - -use api_models::analytics::{ - refunds::{RefundMetrics, RefundMetricsBucketIdentifier, RefundMetricsBucketResponse}, - AnalyticsMetadata, GetRefundMetricRequest, MetricsResponse, -}; -use error_stack::{IntoReport, ResultExt}; -use router_env::{ - logger, - tracing::{self, Instrument}, -}; - -use super::RefundMetricsAccumulator; -use crate::{ - analytics::{ - core::AnalyticsApiResponse, errors::AnalyticsError, refunds::RefundMetricAccumulator, - AnalyticsProvider, - }, - services::ApplicationResponse, - types::domain, -}; - -pub async fn get_metrics( - pool: AnalyticsProvider, - merchant_account: domain::MerchantAccount, - req: GetRefundMetricRequest, -) -> AnalyticsApiResponse> { - let mut metrics_accumulator: HashMap = - HashMap::new(); - let mut set = tokio::task::JoinSet::new(); - for metric_type in req.metrics.iter().cloned() { - let req = req.clone(); - let merchant_id = merchant_account.merchant_id.clone(); - let pool = pool.clone(); - let task_span = tracing::debug_span!( - "analytics_refund_query", - refund_metric = metric_type.as_ref() - ); - set.spawn( - async move { - let data = pool - .get_refund_metrics( - &metric_type, - 
&req.group_by_names.clone(), - &merchant_id, - &req.filters, - &req.time_series.map(|t| t.granularity), - &req.time_range, - ) - .await - .change_context(AnalyticsError::UnknownError); - (metric_type, data) - } - .instrument(task_span), - ); - } - - while let Some((metric, data)) = set - .join_next() - .await - .transpose() - .into_report() - .change_context(AnalyticsError::UnknownError)? - { - for (id, value) in data? { - logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}"); - let metrics_builder = metrics_accumulator.entry(id).or_default(); - match metric { - RefundMetrics::RefundSuccessRate => metrics_builder - .refund_success_rate - .add_metrics_bucket(&value), - RefundMetrics::RefundCount => { - metrics_builder.refund_count.add_metrics_bucket(&value) - } - RefundMetrics::RefundSuccessCount => { - metrics_builder.refund_success.add_metrics_bucket(&value) - } - RefundMetrics::RefundProcessedAmount => { - metrics_builder.processed_amount.add_metrics_bucket(&value) - } - } - } - - logger::debug!( - "Analytics Accumulated Results: metric: {}, results: {:#?}", - metric, - metrics_accumulator - ); - } - let query_data: Vec = metrics_accumulator - .into_iter() - .map(|(id, val)| RefundMetricsBucketResponse { - values: val.collect(), - dimensions: id, - }) - .collect(); - - Ok(ApplicationResponse::Json(MetricsResponse { - query_data, - meta_data: [AnalyticsMetadata { - current_time_range: req.time_range, - }], - })) -} diff --git a/crates/router/src/analytics/routes.rs b/crates/router/src/analytics/routes.rs deleted file mode 100644 index 113312cdf10f..000000000000 --- a/crates/router/src/analytics/routes.rs +++ /dev/null @@ -1,164 +0,0 @@ -use actix_web::{web, Responder, Scope}; -use api_models::analytics::{ - GetPaymentFiltersRequest, GetPaymentMetricRequest, GetRefundFilterRequest, - GetRefundMetricRequest, -}; -use router_env::AnalyticsFlow; - -use super::{core::*, payments, refunds, types::AnalyticsDomain}; -use crate::{ - core::api_locking, - services::{ - api, authentication as auth, authentication::AuthenticationData, - authorization::permissions::Permission, - }, - AppState, -}; - -pub struct Analytics; - -impl Analytics { - pub fn server(state: AppState) -> Scope { - let route = web::scope("/analytics/v1").app_data(web::Data::new(state)); - route - .service(web::resource("metrics/payments").route(web::post().to(get_payment_metrics))) - .service(web::resource("metrics/refunds").route(web::post().to(get_refunds_metrics))) - .service(web::resource("filters/payments").route(web::post().to(get_payment_filters))) - .service(web::resource("filters/refunds").route(web::post().to(get_refund_filters))) - .service(web::resource("{domain}/info").route(web::get().to(get_info))) - } -} - -pub async fn get_info( - state: web::Data, - req: actix_web::HttpRequest, - domain: actix_web::web::Path, -) -> impl Responder { - let flow = AnalyticsFlow::GetInfo; - api::server_wrap( - flow, - state, - &req, - domain.into_inner(), - |_, _, domain| get_domain_info(domain), - &auth::NoAuth, - api_locking::LockAction::NotApplicable, - ) - .await -} - -/// # Panics -/// -/// Panics if `json_payload` array does not contain one `GetPaymentMetricRequest` element. 
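The deleted router-side `get_metrics` implementations (the logic now lives in the `analytics` crate) fan each requested metric out on a `tokio::task::JoinSet` and fold rows into an accumulator as tasks finish. A minimal sketch of that fan-out/fan-in shape:

```rust
use std::collections::HashMap;
use tokio::task::JoinSet;

#[tokio::main]
async fn main() {
    let metrics = vec!["refund_count", "refund_success_rate"];
    let mut set = JoinSet::new();

    // Fan out: one task per requested metric, as in the analytics core.
    for metric in metrics {
        set.spawn(async move {
            // Placeholder for the per-metric database query.
            (metric, 42_u64)
        });
    }

    // Fan in: accumulate rows as each task completes, in completion order.
    let mut accumulator: HashMap<&str, u64> = HashMap::new();
    while let Some(joined) = set.join_next().await {
        let (metric, value) = joined.expect("task panicked");
        accumulator.insert(metric, value);
    }

    assert_eq!(accumulator.len(), 2);
}
```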
-pub async fn get_payment_metrics( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json<[GetPaymentMetricRequest; 1]>, -) -> impl Responder { - // safety: This shouldn't panic owing to the data type - #[allow(clippy::expect_used)] - let payload = json_payload - .into_inner() - .to_vec() - .pop() - .expect("Couldn't get GetPaymentMetricRequest"); - let flow = AnalyticsFlow::GetPaymentMetrics; - api::server_wrap( - flow, - state, - &req, - payload, - |state, auth: AuthenticationData, req| { - payments::get_metrics(state.pool.clone(), auth.merchant_account, req) - }, - auth::auth_type( - &auth::ApiKeyAuth, - &auth::JWTAuth(Permission::Analytics), - req.headers(), - ), - api_locking::LockAction::NotApplicable, - ) - .await -} - -/// # Panics -/// -/// Panics if `json_payload` array does not contain one `GetRefundMetricRequest` element. -pub async fn get_refunds_metrics( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json<[GetRefundMetricRequest; 1]>, -) -> impl Responder { - #[allow(clippy::expect_used)] - // safety: This shouldn't panic owing to the data type - let payload = json_payload - .into_inner() - .to_vec() - .pop() - .expect("Couldn't get GetRefundMetricRequest"); - let flow = AnalyticsFlow::GetRefundsMetrics; - api::server_wrap( - flow, - state, - &req, - payload, - |state, auth: AuthenticationData, req| { - refunds::get_metrics(state.pool.clone(), auth.merchant_account, req) - }, - auth::auth_type( - &auth::ApiKeyAuth, - &auth::JWTAuth(Permission::Analytics), - req.headers(), - ), - api_locking::LockAction::NotApplicable, - ) - .await -} - -pub async fn get_payment_filters( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json, -) -> impl Responder { - let flow = AnalyticsFlow::GetPaymentFilters; - api::server_wrap( - flow, - state, - &req, - json_payload.into_inner(), - |state, auth: AuthenticationData, req| { - payment_filters_core(state.pool.clone(), req, auth.merchant_account) - }, - auth::auth_type( - &auth::ApiKeyAuth, - &auth::JWTAuth(Permission::Analytics), - req.headers(), - ), - api_locking::LockAction::NotApplicable, - ) - .await -} - -pub async fn get_refund_filters( - state: web::Data, - req: actix_web::HttpRequest, - json_payload: web::Json, -) -> impl Responder { - let flow = AnalyticsFlow::GetRefundFilters; - api::server_wrap( - flow, - state, - &req, - json_payload.into_inner(), - |state, auth: AuthenticationData, req: GetRefundFilterRequest| { - refund_filter_core(state.pool.clone(), req, auth.merchant_account) - }, - auth::auth_type( - &auth::ApiKeyAuth, - &auth::JWTAuth(Permission::Analytics), - req.headers(), - ), - api_locking::LockAction::NotApplicable, - ) - .await -} diff --git a/crates/router/src/bin/scheduler.rs b/crates/router/src/bin/scheduler.rs index 4c19408582bc..32e9cfc6ca29 100644 --- a/crates/router/src/bin/scheduler.rs +++ b/crates/router/src/bin/scheduler.rs @@ -20,7 +20,6 @@ use strum::EnumString; use tokio::sync::{mpsc, oneshot}; const SCHEDULER_FLOW: &str = "SCHEDULER_FLOW"; - #[tokio::main] async fn main() -> CustomResult<(), ProcessTrackerError> { // console_subscriber::init(); @@ -30,7 +29,6 @@ async fn main() -> CustomResult<(), ProcessTrackerError> { #[allow(clippy::expect_used)] let conf = Settings::with_config_path(cmd_line.config_path) .expect("Unable to construct application configuration"); - let api_client = Box::new( services::ProxyClient::new( conf.proxy.clone(), diff --git a/crates/router/src/configs/defaults.rs b/crates/router/src/configs/defaults.rs index 
a92e63d67639..f9bfcae1ca10 100644 --- a/crates/router/src/configs/defaults.rs +++ b/crates/router/src/configs/defaults.rs @@ -503,15 +503,6 @@ impl Default for super::settings::RequiredFields { value: None, } ), - ( - "payment_method_data.card.card_holder_name".to_string(), - RequiredFieldInfo { - required_field: "payment_method_data.card.card_holder_name".to_string(), - display_name: "card_holder_name".to_string(), - field_type: enums::FieldType::UserFullName, - value: None, - } - ), ( "email".to_string(), RequiredFieldInfo { @@ -1910,14 +1901,63 @@ impl Default for super::settings::RequiredFields { } ), ( - "payment_method_data.card.card_holder_name".to_string(), + "billing.address.first_name".to_string(), RequiredFieldInfo { - required_field: "payment_method_data.card.card_holder_name".to_string(), + required_field: "billing.address.first_name".to_string(), display_name: "card_holder_name".to_string(), - field_type: enums::FieldType::UserFullName, + field_type: enums::FieldType::UserBillingName, value: None, } - ) + ), + ( + "billing.address.last_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.last_name".to_string(), + display_name: "card_holder_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.line1".to_string(), + RequiredFieldInfo { + required_field: "billing.address.line1".to_string(), + display_name: "line1".to_string(), + field_type: enums::FieldType::UserAddressLine1, + value: None, + } + ), + ( + "billing.address.city".to_string(), + RequiredFieldInfo { + required_field: "billing.address.city".to_string(), + display_name: "city".to_string(), + field_type: enums::FieldType::UserAddressCity, + value: None, + } + ), + ( + "billing.address.zip".to_string(), + RequiredFieldInfo { + required_field: "billing.address.zip".to_string(), + display_name: "zip".to_string(), + field_type: enums::FieldType::UserAddressPincode, + value: None, + } + ), + ( + "billing.address.country".to_string(), + RequiredFieldInfo { + required_field: "billing.address.country".to_string(), + display_name: "country".to_string(), + field_type: enums::FieldType::UserAddressCountry{ + options: vec![ + "ALL".to_string(), + ] + }, + value: None, + } + ), ] ), common: HashMap::new() @@ -2369,6 +2409,129 @@ impl Default for super::settings::RequiredFields { common: HashMap::new(), } ), + ( + enums::Connector::Bankofamerica, + RequiredFieldFinal { + mandate: HashMap::new(), + non_mandate: HashMap::from( + [ + ( + "payment_method_data.card.card_number".to_string(), + RequiredFieldInfo { + required_field: "payment_method_data.card.card_number".to_string(), + display_name: "card_number".to_string(), + field_type: enums::FieldType::UserCardNumber, + value: None, + } + ), + ( + "payment_method_data.card.card_exp_month".to_string(), + RequiredFieldInfo { + required_field: "payment_method_data.card.card_exp_month".to_string(), + display_name: "card_exp_month".to_string(), + field_type: enums::FieldType::UserCardExpiryMonth, + value: None, + } + ), + ( + "payment_method_data.card.card_exp_year".to_string(), + RequiredFieldInfo { + required_field: "payment_method_data.card.card_exp_year".to_string(), + display_name: "card_exp_year".to_string(), + field_type: enums::FieldType::UserCardExpiryYear, + value: None, + } + ), + ( + "payment_method_data.card.card_cvc".to_string(), + RequiredFieldInfo { + required_field: "payment_method_data.card.card_cvc".to_string(), + display_name: "card_cvc".to_string(), + field_type: 
enums::FieldType::UserCardCvc, + value: None, + } + ), + ( + "email".to_string(), + RequiredFieldInfo { + required_field: "email".to_string(), + display_name: "email".to_string(), + field_type: enums::FieldType::UserEmailAddress, + value: None, + } + ), + ( + "billing.address.first_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.first_name".to_string(), + display_name: "billing_first_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.last_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.last_name".to_string(), + display_name: "billing_last_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.city".to_string(), + RequiredFieldInfo { + required_field: "billing.address.city".to_string(), + display_name: "city".to_string(), + field_type: enums::FieldType::UserAddressCity, + value: None, + } + ), + ( + "billing.address.state".to_string(), + RequiredFieldInfo { + required_field: "billing.address.state".to_string(), + display_name: "state".to_string(), + field_type: enums::FieldType::UserAddressState, + value: None, + } + ), + ( + "billing.address.zip".to_string(), + RequiredFieldInfo { + required_field: "billing.address.zip".to_string(), + display_name: "zip".to_string(), + field_type: enums::FieldType::UserAddressPincode, + value: None, + } + ), + ( + "billing.address.country".to_string(), + RequiredFieldInfo { + required_field: "billing.address.country".to_string(), + display_name: "country".to_string(), + field_type: enums::FieldType::UserAddressCountry{ + options: vec![ + "ALL".to_string(), + ] + }, + value: None, + } + ), + ( + "billing.address.line1".to_string(), + RequiredFieldInfo { + required_field: "billing.address.line1".to_string(), + display_name: "line1".to_string(), + field_type: enums::FieldType::UserAddressLine1, + value: None, + } + ), + ] + ), + common: HashMap::new(), + } + ), ( enums::Connector::Bluesnap, RequiredFieldFinal { @@ -3686,14 +3849,63 @@ impl Default for super::settings::RequiredFields { } ), ( - "payment_method_data.card.card_holder_name".to_string(), + "billing.address.first_name".to_string(), RequiredFieldInfo { - required_field: "payment_method_data.card.card_holder_name".to_string(), + required_field: "billing.address.first_name".to_string(), display_name: "card_holder_name".to_string(), - field_type: enums::FieldType::UserFullName, + field_type: enums::FieldType::UserBillingName, value: None, } - ) + ), + ( + "billing.address.last_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.last_name".to_string(), + display_name: "card_holder_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.line1".to_string(), + RequiredFieldInfo { + required_field: "billing.address.line1".to_string(), + display_name: "line1".to_string(), + field_type: enums::FieldType::UserAddressLine1, + value: None, + } + ), + ( + "billing.address.city".to_string(), + RequiredFieldInfo { + required_field: "billing.address.city".to_string(), + display_name: "city".to_string(), + field_type: enums::FieldType::UserAddressCity, + value: None, + } + ), + ( + "billing.address.zip".to_string(), + RequiredFieldInfo { + required_field: "billing.address.zip".to_string(), + display_name: "zip".to_string(), + field_type: enums::FieldType::UserAddressPincode, + value: None, + } + ), + ( + "billing.address.country".to_string(), + 
RequiredFieldInfo { + required_field: "billing.address.country".to_string(), + display_name: "country".to_string(), + field_type: enums::FieldType::UserAddressCountry{ + options: vec![ + "ALL".to_string(), + ] + }, + value: None, + } + ), ] ), common: HashMap::new() @@ -4056,6 +4268,64 @@ impl Default for super::settings::RequiredFields { value: None, } ), + ( + "billing.address.first_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.first_name".to_string(), + display_name: "card_holder_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.last_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.last_name".to_string(), + display_name: "card_holder_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.line1".to_string(), + RequiredFieldInfo { + required_field: "billing.address.line1".to_string(), + display_name: "line1".to_string(), + field_type: enums::FieldType::UserAddressLine1, + value: None, + } + ), + ( + "billing.address.city".to_string(), + RequiredFieldInfo { + required_field: "billing.address.city".to_string(), + display_name: "city".to_string(), + field_type: enums::FieldType::UserAddressCity, + value: None, + } + ), + ( + "billing.address.zip".to_string(), + RequiredFieldInfo { + required_field: "billing.address.zip".to_string(), + display_name: "zip".to_string(), + field_type: enums::FieldType::UserAddressPincode, + value: None, + } + ), + ( + "billing.address.country".to_string(), + RequiredFieldInfo { + required_field: "billing.address.country".to_string(), + display_name: "country".to_string(), + field_type: enums::FieldType::UserAddressCountry{ + options: vec![ + "ALL".to_string(), + ] + }, + value: None, + } + ), ]), } ) @@ -4094,6 +4364,93 @@ impl Default for super::settings::RequiredFields { common: HashMap::new(), } ), + ( + enums::Connector::Bankofamerica, + RequiredFieldFinal { + mandate: HashMap::new(), + non_mandate: HashMap::from( + [ + ( + "email".to_string(), + RequiredFieldInfo { + required_field: "email".to_string(), + display_name: "email".to_string(), + field_type: enums::FieldType::UserEmailAddress, + value: None, + } + ), + ( + "billing.address.first_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.first_name".to_string(), + display_name: "billing_first_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.last_name".to_string(), + RequiredFieldInfo { + required_field: "billing.address.last_name".to_string(), + display_name: "billing_last_name".to_string(), + field_type: enums::FieldType::UserBillingName, + value: None, + } + ), + ( + "billing.address.city".to_string(), + RequiredFieldInfo { + required_field: "billing.address.city".to_string(), + display_name: "city".to_string(), + field_type: enums::FieldType::UserAddressCity, + value: None, + } + ), + ( + "billing.address.state".to_string(), + RequiredFieldInfo { + required_field: "billing.address.state".to_string(), + display_name: "state".to_string(), + field_type: enums::FieldType::UserAddressState, + value: None, + } + ), + ( + "billing.address.zip".to_string(), + RequiredFieldInfo { + required_field: "billing.address.zip".to_string(), + display_name: "zip".to_string(), + field_type: enums::FieldType::UserAddressPincode, + value: None, + } + ), + ( + "billing.address.country".to_string(), + RequiredFieldInfo { + required_field: 
"billing.address.country".to_string(), + display_name: "country".to_string(), + field_type: enums::FieldType::UserAddressCountry{ + options: vec![ + "ALL".to_string(), + ] + }, + value: None, + } + ), + ( + "billing.address.line1".to_string(), + RequiredFieldInfo { + required_field: "billing.address.line1".to_string(), + display_name: "line1".to_string(), + field_type: enums::FieldType::UserAddressLine1, + value: None, + } + ), + ] + ), + common: HashMap::new(), + } + ), ]), }, ), diff --git a/crates/router/src/configs/kms.rs b/crates/router/src/configs/kms.rs index c2f159d16cf1..37f2d15774a5 100644 --- a/crates/router/src/configs/kms.rs +++ b/crates/router/src/configs/kms.rs @@ -63,7 +63,7 @@ impl KmsDecrypt for settings::Database { password: self.password.decrypt_inner(kms_client).await?.into(), pool_size: self.pool_size, connection_timeout: self.connection_timeout, - queue_strategy: self.queue_strategy.into(), + queue_strategy: self.queue_strategy, min_idle: self.min_idle, max_lifetime: self.max_lifetime, }) diff --git a/crates/router/src/configs/settings.rs b/crates/router/src/configs/settings.rs index 918ae6647eef..f2d962b0abee 100644 --- a/crates/router/src/configs/settings.rs +++ b/crates/router/src/configs/settings.rs @@ -4,6 +4,8 @@ use std::{ str::FromStr, }; +#[cfg(feature = "olap")] +use analytics::ReportConfig; use api_models::{enums, payment_methods::RequiredFieldInfo}; use common_utils::ext_traits::ConfigExt; use config::{Environment, File}; @@ -16,12 +18,14 @@ pub use router_env::config::{Log, LogConsole, LogFile, LogTelemetry}; use rust_decimal::Decimal; use scheduler::SchedulerSettings; use serde::{de::Error, Deserialize, Deserializer}; +use storage_impl::config::QueueStrategy; #[cfg(feature = "olap")] use crate::analytics::AnalyticsConfig; use crate::{ core::errors::{ApplicationError, ApplicationResult}, env::{self, logger, Env}, + events::EventsConfig, }; #[cfg(feature = "kms")] pub type Password = kms::KmsValue; @@ -109,6 +113,9 @@ pub struct Settings { pub analytics: AnalyticsConfig, #[cfg(feature = "kv_store")] pub kv_config: KvConfig, + #[cfg(feature = "olap")] + pub report_download_config: ReportConfig, + pub events: EventsConfig, } #[derive(Debug, Deserialize, Clone)] @@ -521,23 +528,6 @@ pub struct Database { pub max_lifetime: Option, } -#[derive(Debug, Deserialize, Clone, Default)] -#[serde(rename_all = "PascalCase")] -pub enum QueueStrategy { - #[default] - Fifo, - Lifo, -} - -impl From for bb8::QueueStrategy { - fn from(value: QueueStrategy) -> Self { - match value { - QueueStrategy::Fifo => Self::Fifo, - QueueStrategy::Lifo => Self::Lifo, - } - } -} - #[cfg(not(feature = "kms"))] impl From for storage_impl::config::Database { fn from(val: Database) -> Self { @@ -837,6 +827,7 @@ impl Settings { #[cfg(feature = "s3")] self.file_upload_config.validate()?; self.lock_settings.validate()?; + self.events.validate()?; Ok(()) } } diff --git a/crates/router/src/connector/aci/transformers.rs b/crates/router/src/connector/aci/transformers.rs index 66aeb3bb6b2b..9cfb657bdca8 100644 --- a/crates/router/src/connector/aci/transformers.rs +++ b/crates/router/src/connector/aci/transformers.rs @@ -733,6 +733,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.id), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/adyen/transformers.rs b/crates/router/src/connector/adyen/transformers.rs index cfa601112677..1793e3e07a87 100644 --- 
a/crates/router/src/connector/adyen/transformers.rs +++ b/crates/router/src/connector/adyen/transformers.rs @@ -879,7 +879,126 @@ impl TryFrom<&api_enums::BankNames> for OpenBankingUKIssuer { api::enums::BankNames::TsbBank => Ok(Self::TsbBank), api::enums::BankNames::TescoBank => Ok(Self::TescoBank), api::enums::BankNames::UlsterBank => Ok(Self::UlsterBank), - _ => Err(errors::ConnectorError::NotSupported { + enums::BankNames::AmericanExpress + | enums::BankNames::AffinBank + | enums::BankNames::AgroBank + | enums::BankNames::AllianceBank + | enums::BankNames::AmBank + | enums::BankNames::BankOfAmerica + | enums::BankNames::BankIslam + | enums::BankNames::BankMuamalat + | enums::BankNames::BankRakyat + | enums::BankNames::BankSimpananNasional + | enums::BankNames::BlikPSP + | enums::BankNames::CapitalOne + | enums::BankNames::Chase + | enums::BankNames::Citi + | enums::BankNames::CimbBank + | enums::BankNames::Discover + | enums::BankNames::NavyFederalCreditUnion + | enums::BankNames::PentagonFederalCreditUnion + | enums::BankNames::SynchronyBank + | enums::BankNames::WellsFargo + | enums::BankNames::AbnAmro + | enums::BankNames::AsnBank + | enums::BankNames::Bunq + | enums::BankNames::Handelsbanken + | enums::BankNames::HongLeongBank + | enums::BankNames::Ing + | enums::BankNames::Knab + | enums::BankNames::KuwaitFinanceHouse + | enums::BankNames::Moneyou + | enums::BankNames::Rabobank + | enums::BankNames::Regiobank + | enums::BankNames::SnsBank + | enums::BankNames::TriodosBank + | enums::BankNames::VanLanschot + | enums::BankNames::ArzteUndApothekerBank + | enums::BankNames::AustrianAnadiBankAg + | enums::BankNames::BankAustria + | enums::BankNames::Bank99Ag + | enums::BankNames::BankhausCarlSpangler + | enums::BankNames::BankhausSchelhammerUndSchatteraAg + | enums::BankNames::BankMillennium + | enums::BankNames::BankPEKAOSA + | enums::BankNames::BawagPskAg + | enums::BankNames::BksBankAg + | enums::BankNames::BrullKallmusBankAg + | enums::BankNames::BtvVierLanderBank + | enums::BankNames::CapitalBankGraweGruppeAg + | enums::BankNames::CeskaSporitelna + | enums::BankNames::Dolomitenbank + | enums::BankNames::EasybankAg + | enums::BankNames::EPlatbyVUB + | enums::BankNames::ErsteBankUndSparkassen + | enums::BankNames::FrieslandBank + | enums::BankNames::HypoAlpeadriabankInternationalAg + | enums::BankNames::HypoNoeLbFurNiederosterreichUWien + | enums::BankNames::HypoOberosterreichSalzburgSteiermark + | enums::BankNames::HypoTirolBankAg + | enums::BankNames::HypoVorarlbergBankAg + | enums::BankNames::HypoBankBurgenlandAktiengesellschaft + | enums::BankNames::KomercniBanka + | enums::BankNames::MBank + | enums::BankNames::MarchfelderBank + | enums::BankNames::Maybank + | enums::BankNames::OberbankAg + | enums::BankNames::OsterreichischeArzteUndApothekerbank + | enums::BankNames::OcbcBank + | enums::BankNames::PayWithING + | enums::BankNames::PlaceZIPKO + | enums::BankNames::PlatnoscOnlineKartaPlatnicza + | enums::BankNames::PosojilnicaBankEGen + | enums::BankNames::PostovaBanka + | enums::BankNames::PublicBank + | enums::BankNames::RaiffeisenBankengruppeOsterreich + | enums::BankNames::RhbBank + | enums::BankNames::SchelhammerCapitalBankAg + | enums::BankNames::StandardCharteredBank + | enums::BankNames::SchoellerbankAg + | enums::BankNames::SpardaBankWien + | enums::BankNames::SporoPay + | enums::BankNames::TatraPay + | enums::BankNames::Viamo + | enums::BankNames::VolksbankGruppe + | enums::BankNames::VolkskreditbankAg + | enums::BankNames::VrBankBraunau + | enums::BankNames::UobBank + 
| enums::BankNames::PayWithAliorBank + | enums::BankNames::BankiSpoldzielcze + | enums::BankNames::PayWithInteligo + | enums::BankNames::BNPParibasPoland + | enums::BankNames::BankNowySA + | enums::BankNames::CreditAgricole + | enums::BankNames::PayWithBOS + | enums::BankNames::PayWithCitiHandlowy + | enums::BankNames::PayWithPlusBank + | enums::BankNames::ToyotaBank + | enums::BankNames::VeloBank + | enums::BankNames::ETransferPocztowy24 + | enums::BankNames::PlusBank + | enums::BankNames::EtransferPocztowy24 + | enums::BankNames::BankiSpbdzielcze + | enums::BankNames::BankNowyBfgSa + | enums::BankNames::GetinBank + | enums::BankNames::Blik + | enums::BankNames::NoblePay + | enums::BankNames::IdeaBank + | enums::BankNames::EnveloBank + | enums::BankNames::NestPrzelew + | enums::BankNames::MbankMtransfer + | enums::BankNames::Inteligo + | enums::BankNames::PbacZIpko + | enums::BankNames::BnpParibas + | enums::BankNames::BankPekaoSa + | enums::BankNames::VolkswagenBank + | enums::BankNames::AliorBank + | enums::BankNames::Boz + | enums::BankNames::BangkokBank + | enums::BankNames::KrungsriBank + | enums::BankNames::KrungThaiBank + | enums::BankNames::TheSiamCommercialBank + | enums::BankNames::KasikornBank => Err(errors::ConnectorError::NotSupported { message: String::from("BankRedirect"), connector: "Adyen", })?, @@ -2102,7 +2221,12 @@ impl<'a> TryFrom<&api_models::payments::BankRedirectData> for AdyenPaymentMethod ), api_models::payments::BankRedirectData::OpenBankingUk { issuer, .. } => Ok( AdyenPaymentMethod::OpenBankingUK(Box::new(OpenBankingUKData { - issuer: OpenBankingUKIssuer::try_from(issuer)?, + issuer: match issuer { + Some(bank_name) => OpenBankingUKIssuer::try_from(bank_name)?, + None => Err(errors::ConnectorError::MissingRequiredField { + field_name: "issuer", + })?, + }, })), ), api_models::payments::BankRedirectData::Sofort { .. } => Ok(AdyenPaymentMethod::Sofort), @@ -2580,7 +2704,7 @@ impl<'a> let additional_data = get_additional_data(item.router_data); let return_url = item.router_data.request.get_return_url()?; let payment_method = AdyenPaymentMethod::try_from(bank_redirect_data)?; - let (shopper_locale, country) = get_redirect_extra_details(item.router_data); + let (shopper_locale, country) = get_redirect_extra_details(item.router_data)?; let line_items = Some(get_line_items(item)); Ok(AdyenPaymentRequest { @@ -2611,7 +2735,7 @@ impl<'a> fn get_redirect_extra_details( item: &types::PaymentsAuthorizeRouterData, -) -> (Option, Option) { +) -> Result<(Option, Option), errors::ConnectorError> { match item.request.payment_method_data { api_models::payments::PaymentMethodData::BankRedirect(ref redirect_data) => { match redirect_data { @@ -2619,17 +2743,20 @@ fn get_redirect_extra_details( country, preferred_language, .. - } => ( + } => Ok(( Some(preferred_language.to_string()), Some(country.to_owned()), - ), + )), api_models::payments::BankRedirectData::OpenBankingUk { country, .. 
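The long Adyen hunk above deliberately spells out every unsupported `BankNames` variant instead of keeping the old `_ =>` wildcard: with an exhaustive match, adding a new bank fails to compile until someone decides whether OpenBankingUK supports it. A trimmed three-variant illustration of the same idea:

```rust
#[derive(Debug)]
enum BankNames {
    TsbBank,
    UlsterBank,
    BankOfAmerica,
}

#[derive(Debug)]
enum OpenBankingUKIssuer {
    TsbBank,
    UlsterBank,
}

fn issuer_for(bank: BankNames) -> Result<OpenBankingUKIssuer, String> {
    match bank {
        BankNames::TsbBank => Ok(OpenBankingUKIssuer::TsbBank),
        BankNames::UlsterBank => Ok(OpenBankingUKIssuer::UlsterBank),
        // Every unsupported variant is named; no `_` arm to hide new ones.
        BankNames::BankOfAmerica => Err(String::from("BankRedirect not supported")),
    }
}

fn main() {
    assert!(issuer_for(BankNames::TsbBank).is_ok());
    assert!(issuer_for(BankNames::UlsterBank).is_ok());
    assert!(issuer_for(BankNames::BankOfAmerica).is_err());
}
```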
} => { - (None, Some(country.to_owned())) + let country = country.ok_or(errors::ConnectorError::MissingRequiredField { + field_name: "country", + })?; + Ok((None, Some(country))) } - _ => (None, None), + _ => Ok((None, None)), } } - _ => (None, None), + _ => Ok((None, None)), } } @@ -2851,6 +2978,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -2884,6 +3012,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), payment_method_balance: Some(types::PaymentMethodBalance { amount: item.response.balance.value, @@ -2945,6 +3074,7 @@ pub fn get_adyen_response( connector_metadata: None, network_txn_id, connector_response_reference_id: Some(response.merchant_reference), + incremental_authorization_allowed: None, }; Ok((status, error, payments_response_data)) } @@ -3044,6 +3174,7 @@ pub fn get_redirection_response( connector_metadata, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payments_response_data)) } @@ -3095,6 +3226,7 @@ pub fn get_present_to_shopper_response( connector_metadata, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payments_response_data)) } @@ -3143,6 +3275,7 @@ pub fn get_qr_code_response( connector_metadata, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payments_response_data)) } @@ -3177,6 +3310,7 @@ pub fn get_redirection_error_response( connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payments_response_data)) @@ -3511,6 +3645,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), amount_captured: Some(item.response.amount.value), ..item.data diff --git a/crates/router/src/connector/airwallex/transformers.rs b/crates/router/src/connector/airwallex/transformers.rs index 3785e02d4747..2de7f6fe00ff 100644 --- a/crates/router/src/connector/airwallex/transformers.rs +++ b/crates/router/src/connector/airwallex/transformers.rs @@ -555,6 +555,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -596,6 +597,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/authorizedotnet/transformers.rs b/crates/router/src/connector/authorizedotnet/transformers.rs index 2c8a63a53e5c..30323ca4ef23 100644 --- a/crates/router/src/connector/authorizedotnet/transformers.rs +++ b/crates/router/src/connector/authorizedotnet/transformers.rs @@ -610,6 +610,7 @@ impl connector_response_reference_id: Some( transaction_response.transaction_id.clone(), ), + incremental_authorization_allowed: None, }), }, ..item.data @@ -680,6 +681,7 @@ impl connector_response_reference_id: Some( transaction_response.transaction_id.clone(), ), + incremental_authorization_allowed: None, }), }, ..item.data @@ -977,6 +979,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: 
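The `get_redirect_extra_details` change above is the standard Option-to-typed-error conversion: the function now returns a `Result` and maps a missing `country` to `MissingRequiredField` via `Option::ok_or` instead of silently yielding `(None, None)`. A self-contained sketch with stand-in types:

```rust
#[derive(Debug)]
enum ConnectorError {
    MissingRequiredField { field_name: &'static str },
}

fn redirect_extra_details(
    country: Option<String>,
) -> Result<(Option<String>, Option<String>), ConnectorError> {
    // Surface the absent country as an error instead of a silent None.
    let country = country.ok_or(ConnectorError::MissingRequiredField {
        field_name: "country",
    })?;
    Ok((None, Some(country)))
}

fn main() {
    assert!(matches!(
        redirect_extra_details(None),
        Err(ConnectorError::MissingRequiredField { field_name: "country" })
    ));
    assert_eq!(
        redirect_extra_details(Some("GB".into())).unwrap().1.as_deref(),
        Some("GB")
    );
}
```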
Some(transaction.transaction_id.clone()), + incremental_authorization_allowed: None, }), status: payment_status, ..item.data diff --git a/crates/router/src/connector/bambora/transformers.rs b/crates/router/src/connector/bambora/transformers.rs index e686186c901b..2d50569f9a49 100644 --- a/crates/router/src/connector/bambora/transformers.rs +++ b/crates/router/src/connector/bambora/transformers.rs @@ -215,6 +215,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(pg_response.order_number.to_string()), + incremental_authorization_allowed: None, }), ..item.data }), @@ -241,6 +242,7 @@ impl connector_response_reference_id: Some( item.data.connector_request_reference_id.to_string(), ), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/bankofamerica/transformers.rs b/crates/router/src/connector/bankofamerica/transformers.rs index 12170deb1a00..e31a69669c6d 100644 --- a/crates/router/src/connector/bankofamerica/transformers.rs +++ b/crates/router/src/connector/bankofamerica/transformers.rs @@ -442,11 +442,18 @@ impl ForeignFrom<(BankofamericaPaymentStatus, bool)> for enums::AttemptStatus { | BankofamericaPaymentStatus::AuthorizedPendingReview => { if auto_capture { // Because BankOfAmerica will return Payment Status as Authorized even in AutoCapture Payment - Self::Pending + Self::Charged } else { Self::Authorized } } + BankofamericaPaymentStatus::Pending => { + if auto_capture { + Self::Charged + } else { + Self::Pending + } + } BankofamericaPaymentStatus::Succeeded | BankofamericaPaymentStatus::Transmitted => { Self::Charged } @@ -456,7 +463,6 @@ impl ForeignFrom<(BankofamericaPaymentStatus, bool)> for enums::AttemptStatus { BankofamericaPaymentStatus::Failed | BankofamericaPaymentStatus::Declined => { Self::Failure } - BankofamericaPaymentStatus::Pending => Self::Pending, } } } @@ -528,6 +534,7 @@ impl .code .unwrap_or(info_response.id), ), + incremental_authorization_allowed: None, }), ..item.data }), @@ -585,6 +592,7 @@ impl .code .unwrap_or(info_response.id), ), + incremental_authorization_allowed: None, }), ..item.data }), @@ -642,6 +650,7 @@ impl .code .unwrap_or(info_response.id), ), + incremental_authorization_allowed: None, }), ..item.data }), @@ -719,6 +728,7 @@ impl .client_reference_information .map(|cref| cref.code) .unwrap_or(Some(app_response.id)), + incremental_authorization_allowed: None, }), ..item.data }), @@ -733,6 +743,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(error_response.id), + incremental_authorization_allowed: None, }), ..item.data }), diff --git a/crates/router/src/connector/bitpay/transformers.rs b/crates/router/src/connector/bitpay/transformers.rs index 89dd2368b2b7..0ddf2dbf913b 100644 --- a/crates/router/src/connector/bitpay/transformers.rs +++ b/crates/router/src/connector/bitpay/transformers.rs @@ -178,6 +178,7 @@ impl .data .order_id .or(Some(item.response.data.id)), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/bluesnap.rs b/crates/router/src/connector/bluesnap.rs index 0bc56d4e9955..25cdcb731f11 100644 --- a/crates/router/src/connector/bluesnap.rs +++ b/crates/router/src/connector/bluesnap.rs @@ -713,6 +713,7 @@ impl ConnectorIntegration connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.transaction_id), + incremental_authorization_allowed: None, }), ..item.data }) diff --git 
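The Bank of America status fix above addresses the connector reporting `Authorized` (and sometimes `Pending`) even for auto-captured payments; the mapping now folds those into `Charged` when auto-capture is in effect. The same logic as a plain function rather than the repo's `ForeignFrom` trait:

```rust
#[derive(Debug, PartialEq)]
enum BoaStatus {
    Authorized,
    Pending,
    Declined,
}

#[derive(Debug, PartialEq)]
enum AttemptStatus {
    Authorized,
    Pending,
    Charged,
    Failure,
}

fn map_status(status: BoaStatus, auto_capture: bool) -> AttemptStatus {
    match status {
        // BoA returns Authorized even for auto-capture payments.
        BoaStatus::Authorized if auto_capture => AttemptStatus::Charged,
        BoaStatus::Authorized => AttemptStatus::Authorized,
        BoaStatus::Pending if auto_capture => AttemptStatus::Charged,
        BoaStatus::Pending => AttemptStatus::Pending,
        BoaStatus::Declined => AttemptStatus::Failure,
    }
}

fn main() {
    assert_eq!(map_status(BoaStatus::Authorized, true), AttemptStatus::Charged);
    assert_eq!(map_status(BoaStatus::Pending, false), AttemptStatus::Pending);
    assert_eq!(map_status(BoaStatus::Declined, true), AttemptStatus::Failure);
}
```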
a/crates/router/src/connector/boku/transformers.rs b/crates/router/src/connector/boku/transformers.rs index 3df9126fc4c0..c671560765d0 100644 --- a/crates/router/src/connector/boku/transformers.rs +++ b/crates/router/src/connector/boku/transformers.rs @@ -252,6 +252,7 @@ impl TryFrom connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -272,6 +273,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }), @@ -435,6 +437,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -452,6 +455,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }), @@ -495,6 +499,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -539,6 +544,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -1061,6 +1067,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -1158,6 +1165,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -1255,6 +1263,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/braintree/transformers.rs b/crates/router/src/connector/braintree/transformers.rs index dcca9c26434c..44daef94e8a6 100644 --- a/crates/router/src/connector/braintree/transformers.rs +++ b/crates/router/src/connector/braintree/transformers.rs @@ -239,6 +239,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/cashtocode/transformers.rs b/crates/router/src/connector/cashtocode/transformers.rs index cfca998e06c3..b38ca4b67132 100644 --- a/crates/router/src/connector/cashtocode/transformers.rs +++ b/crates/router/src/connector/cashtocode/transformers.rs @@ -238,6 +238,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ) } @@ -281,6 +282,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), amount_captured: Some(item.response.amount), ..item.data diff --git a/crates/router/src/connector/checkout/transformers.rs b/crates/router/src/connector/checkout/transformers.rs index 173ac0b8f585..ebe02f30d5ff 100644 --- a/crates/router/src/connector/checkout/transformers.rs +++ b/crates/router/src/connector/checkout/transformers.rs @@ -591,6 +591,7 @@ impl TryFrom> connector_response_reference_id: Some( item.response.reference.unwrap_or(item.response.id), ), + incremental_authorization_allowed: None, }; Ok(Self { status, @@ -640,6 +641,7 @@ impl TryFrom> connector_response_reference_id: Some( 
item.response.reference.unwrap_or(item.response.id), ), + incremental_authorization_allowed: None, }; Ok(Self { status, @@ -714,6 +716,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), status: response.into(), ..item.data @@ -810,6 +813,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: item.response.reference, + incremental_authorization_allowed: None, }), status, amount_captured, diff --git a/crates/router/src/connector/coinbase/transformers.rs b/crates/router/src/connector/coinbase/transformers.rs index 6cc097bc9d8d..ce9bb3e871c5 100644 --- a/crates/router/src/connector/coinbase/transformers.rs +++ b/crates/router/src/connector/coinbase/transformers.rs @@ -146,6 +146,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.data.id.clone()), + incremental_authorization_allowed: None, }), |context| { Ok(types::PaymentsResponseData::TransactionUnresolvedResponse{ diff --git a/crates/router/src/connector/cryptopay/transformers.rs b/crates/router/src/connector/cryptopay/transformers.rs index 446da0761d1f..3af604c786b8 100644 --- a/crates/router/src/connector/cryptopay/transformers.rs +++ b/crates/router/src/connector/cryptopay/transformers.rs @@ -173,6 +173,7 @@ impl .data .custom_id .or(Some(item.response.data.id)), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/cybersource.rs b/crates/router/src/connector/cybersource.rs index 1868611184f9..1de107af086d 100644 --- a/crates/router/src/connector/cybersource.rs +++ b/crates/router/src/connector/cybersource.rs @@ -307,18 +307,15 @@ impl data: &types::SetupMandateRouterData, res: types::Response, ) -> CustomResult { - let response: cybersource::CybersourcePaymentsResponse = res + let response: cybersource::CybersourceSetupMandatesResponse = res .response - .parse_struct("CybersourceMandateResponse") + .parse_struct("CybersourceSetupMandatesResponse") .change_context(errors::ConnectorError::ResponseDeserializationFailed)?; - types::RouterData::try_from(( - types::ResponseRouterData { - response, - data: data.clone(), - http_code: res.status_code, - }, - false, - )) + types::RouterData::try_from(types::ResponseRouterData { + response, + data: data.clone(), + http_code: res.status_code, + }) } fn get_error_response( diff --git a/crates/router/src/connector/cybersource/transformers.rs b/crates/router/src/connector/cybersource/transformers.rs index 656c45b6d6b6..953f82c76a83 100644 --- a/crates/router/src/connector/cybersource/transformers.rs +++ b/crates/router/src/connector/cybersource/transformers.rs @@ -499,6 +499,16 @@ pub struct CybersourcePaymentsResponse { token_information: Option, } +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CybersourceSetupMandatesResponse { + id: String, + status: CybersourcePaymentStatus, + error_information: Option, + client_reference_information: Option, + token_information: Option, +} + #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ClientReferenceInformation { @@ -544,8 +554,9 @@ impl connector_mandate_id: Some(token_info.instrument_identifier.id), payment_method_id: None, }); + let status = get_payment_status(is_capture, item.response.status.into()); Ok(Self { - status: get_payment_status(is_capture, item.response.status.into()), + status, response: match 
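The `cybersource.rs` hunk above stops reusing `CybersourcePaymentsResponse` for setup mandates and deserializes into a dedicated `CybersourceSetupMandatesResponse`. A sketch of that flow-specific struct, with `serde_json::from_slice` standing in for the repo's `parse_struct` helper and the status field simplified to a `String`:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct CybersourceSetupMandatesResponse {
    id: String,
    status: String,
    client_reference_information: Option<ClientReferenceInformation>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ClientReferenceInformation {
    code: Option<String>,
}

fn main() {
    // Shape of a (hypothetical) setup-mandate response body.
    let body = br#"{"id":"pay_123","status":"AUTHORIZED","clientReferenceInformation":{"code":"ref_1"}}"#;
    let response: CybersourceSetupMandatesResponse =
        serde_json::from_slice(body).expect("deserialization failed");
    assert_eq!(response.id, "pay_123");
    assert_eq!(response.status, "AUTHORIZED");
    assert!(response.client_reference_information.is_some());
}
```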
item.response.error_information { Some(error) => Err(types::ErrorResponse { code: consts::NO_ERROR_CODE.to_string(), @@ -553,7 +564,7 @@ impl reason: Some(error.reason), status_code: item.http_code, attempt_status: None, - connector_transaction_id: None, + connector_transaction_id: Some(item.response.id), }), _ => Ok(types::PaymentsResponseData::TransactionResponse { resource_id: types::ResponseId::ConnectorTransactionId( @@ -568,6 +579,74 @@ impl .client_reference_information .map(|cref| cref.code) .unwrap_or(Some(item.response.id)), + incremental_authorization_allowed: Some( + status == enums::AttemptStatus::Authorized, + ), + }), + }, + ..item.data + }) + } +} + +impl + TryFrom< + types::ResponseRouterData< + F, + CybersourceSetupMandatesResponse, + T, + types::PaymentsResponseData, + >, + > for types::RouterData +{ + type Error = error_stack::Report; + fn try_from( + item: types::ResponseRouterData< + F, + CybersourceSetupMandatesResponse, + T, + types::PaymentsResponseData, + >, + ) -> Result { + let mandate_reference = + item.response + .token_information + .map(|token_info| types::MandateReference { + connector_mandate_id: Some(token_info.instrument_identifier.id), + payment_method_id: None, + }); + let mut mandate_status: enums::AttemptStatus = item.response.status.into(); + if matches!(mandate_status, enums::AttemptStatus::Authorized) { + //In case of zero auth mandates we want to make the payment reach the terminal status so we are converting the authorized status to charged as well. + mandate_status = enums::AttemptStatus::Charged + } + Ok(Self { + status: mandate_status, + response: match item.response.error_information { + Some(error) => Err(types::ErrorResponse { + code: consts::NO_ERROR_CODE.to_string(), + message: error.message, + reason: Some(error.reason), + status_code: item.http_code, + attempt_status: None, + connector_transaction_id: Some(item.response.id), + }), + _ => Ok(types::PaymentsResponseData::TransactionResponse { + resource_id: types::ResponseId::ConnectorTransactionId( + item.response.id.clone(), + ), + redirection_data: None, + mandate_reference, + connector_metadata: None, + network_txn_id: None, + connector_response_reference_id: item + .response + .client_reference_information + .map(|cref| cref.code) + .unwrap_or(Some(item.response.id)), + incremental_authorization_allowed: Some( + mandate_status == enums::AttemptStatus::Authorized, + ), }), }, ..item.data @@ -591,8 +670,9 @@ pub struct ApplicationInformation { fn get_payment_status(is_capture: bool, status: enums::AttemptStatus) -> enums::AttemptStatus { let is_authorized = matches!(status, enums::AttemptStatus::Authorized); - if is_capture && is_authorized { - return enums::AttemptStatus::Pending; + let is_pending = matches!(status, enums::AttemptStatus::Pending); + if is_capture && (is_authorized || is_pending) { + return enums::AttemptStatus::Charged; } status } @@ -622,11 +702,12 @@ impl ) -> Result { let item = data.0; let is_capture = data.1; + let status = get_payment_status( + is_capture, + item.response.application_information.status.into(), + ); Ok(Self { - status: get_payment_status( - is_capture, - item.response.application_information.status.into(), - ), + status, response: Ok(types::PaymentsResponseData::TransactionResponse { resource_id: types::ResponseId::ConnectorTransactionId(item.response.id.clone()), redirection_data: None, @@ -638,6 +719,7 @@ impl .client_reference_information .map(|cref| cref.code) .unwrap_or(Some(item.response.id)), + incremental_authorization_allowed: 
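The new `TryFrom` impl above encodes the zero-auth mandate rule called out in the diff's own comment: a successful setup mandate has nothing left to capture, so `Authorized` is promoted to the terminal `Charged`, and incremental authorization is only advertised while the attempt is still actually `Authorized`. Distilled:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum AttemptStatus {
    Authorized,
    Charged,
    Failure,
}

fn finalize_mandate_status(status: AttemptStatus) -> (AttemptStatus, Option<bool>) {
    let status = if status == AttemptStatus::Authorized {
        // Zero-auth mandates: reach a terminal status, so convert
        // Authorized to Charged.
        AttemptStatus::Charged
    } else {
        status
    };
    (status, Some(status == AttemptStatus::Authorized))
}

fn main() {
    let (status, incremental) = finalize_mandate_status(AttemptStatus::Authorized);
    assert_eq!(status, AttemptStatus::Charged);
    // After promotion, incremental authorization is no longer allowed.
    assert_eq!(incremental, Some(false));
    let (status, _) = finalize_mandate_status(AttemptStatus::Failure);
    assert_eq!(status, AttemptStatus::Failure);
}
```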
Some(status == enums::AttemptStatus::Authorized), }), ..item.data }) diff --git a/crates/router/src/connector/dlocal/transformers.rs b/crates/router/src/connector/dlocal/transformers.rs index a9033e53d666..92d01cfe56d4 100644 --- a/crates/router/src/connector/dlocal/transformers.rs +++ b/crates/router/src/connector/dlocal/transformers.rs @@ -303,7 +303,7 @@ pub struct DlocalPaymentsResponse { status: DlocalPaymentStatus, id: String, three_dsecure: Option, - order_id: String, + order_id: Option, } impl @@ -323,12 +323,13 @@ impl }); let response = types::PaymentsResponseData::TransactionResponse { - resource_id: types::ResponseId::ConnectorTransactionId(item.response.order_id.clone()), + resource_id: types::ResponseId::ConnectorTransactionId(item.response.id.clone()), redirection_data, mandate_reference: None, connector_metadata: None, network_txn_id: None, - connector_response_reference_id: Some(item.response.order_id.clone()), + connector_response_reference_id: item.response.order_id.clone(), + incremental_authorization_allowed: None, }; Ok(Self { status: enums::AttemptStatus::from(item.response.status), @@ -342,7 +343,7 @@ impl pub struct DlocalPaymentsSyncResponse { status: DlocalPaymentStatus, id: String, - order_id: String, + order_id: Option, } impl @@ -362,14 +363,13 @@ impl Ok(Self { status: enums::AttemptStatus::from(item.response.status), response: Ok(types::PaymentsResponseData::TransactionResponse { - resource_id: types::ResponseId::ConnectorTransactionId( - item.response.order_id.clone(), - ), + resource_id: types::ResponseId::ConnectorTransactionId(item.response.id.clone()), redirection_data: None, mandate_reference: None, connector_metadata: None, network_txn_id: None, - connector_response_reference_id: Some(item.response.order_id.clone()), + connector_response_reference_id: item.response.order_id.clone(), + incremental_authorization_allowed: None, }), ..item.data }) @@ -380,7 +380,7 @@ impl pub struct DlocalPaymentsCaptureResponse { status: DlocalPaymentStatus, id: String, - order_id: String, + order_id: Option, } impl @@ -400,14 +400,13 @@ impl Ok(Self { status: enums::AttemptStatus::from(item.response.status), response: Ok(types::PaymentsResponseData::TransactionResponse { - resource_id: types::ResponseId::ConnectorTransactionId( - item.response.order_id.clone(), - ), + resource_id: types::ResponseId::ConnectorTransactionId(item.response.id.clone()), redirection_data: None, mandate_reference: None, connector_metadata: None, network_txn_id: None, - connector_response_reference_id: Some(item.response.order_id.clone()), + connector_response_reference_id: item.response.order_id.clone(), + incremental_authorization_allowed: None, }), ..item.data }) @@ -444,6 +443,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.order_id.clone()), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/dummyconnector/transformers.rs b/crates/router/src/connector/dummyconnector/transformers.rs index dc707bde42cc..3c7bd2e09d9a 100644 --- a/crates/router/src/connector/dummyconnector/transformers.rs +++ b/crates/router/src/connector/dummyconnector/transformers.rs @@ -250,6 +250,7 @@ impl TryFrom connector_response_reference_id: Some( gateway_resp.transaction_processing_details.order_id, ), + incremental_authorization_allowed: None, }), ..item.data }) @@ -403,6 +404,7 @@ impl TryFrom })), network_txn_id: None, connector_response_reference_id: Some(transaction_id.to_string()), + 
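The `get_payment_status` change above is small but behavior-defining: auto-capture flows now treat both `Authorized` and `Pending` as `Charged`, where the old code mapped `Authorized` to `Pending`. The updated helper, reproduced standalone with a trimmed status enum:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum AttemptStatus {
    Authorized,
    Pending,
    Charged,
}

fn get_payment_status(is_capture: bool, status: AttemptStatus) -> AttemptStatus {
    let is_authorized = matches!(status, AttemptStatus::Authorized);
    let is_pending = matches!(status, AttemptStatus::Pending);
    // Auto-capture: both Authorized and Pending mean the money will move.
    if is_capture && (is_authorized || is_pending) {
        return AttemptStatus::Charged;
    }
    status
}

fn main() {
    assert_eq!(get_payment_status(true, AttemptStatus::Pending), AttemptStatus::Charged);
    assert_eq!(get_payment_status(true, AttemptStatus::Authorized), AttemptStatus::Charged);
    assert_eq!(get_payment_status(false, AttemptStatus::Authorized), AttemptStatus::Authorized);
}
```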
incremental_authorization_allowed: None, }), ..item.data }) @@ -324,6 +325,7 @@ impl })), network_txn_id: None, connector_response_reference_id: Some(transaction_id.to_string()), + incremental_authorization_allowed: None, }), ..item.data }) @@ -391,6 +393,7 @@ impl TryFrom> })), network_txn_id: None, connector_response_reference_id: Some(item.response.transaction_id.to_string()), + incremental_authorization_allowed: None, }), amount_captured: None, ..item.data @@ -458,6 +461,7 @@ impl })), network_txn_id: None, connector_response_reference_id: Some(transaction_id.to_string()), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/globalpay/transformers.rs b/crates/router/src/connector/globalpay/transformers.rs index 78a83e700267..9cef564b3795 100644 --- a/crates/router/src/connector/globalpay/transformers.rs +++ b/crates/router/src/connector/globalpay/transformers.rs @@ -234,6 +234,7 @@ fn get_payment_response( connector_metadata: None, network_txn_id: None, connector_response_reference_id: response.reference, + incremental_authorization_allowed: None, }), } } diff --git a/crates/router/src/connector/globepay/transformers.rs b/crates/router/src/connector/globepay/transformers.rs index ef23f48f5197..f6adacb814de 100644 --- a/crates/router/src/connector/globepay/transformers.rs +++ b/crates/router/src/connector/globepay/transformers.rs @@ -157,6 +157,7 @@ impl connector_metadata, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -230,6 +231,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/gocardless/transformers.rs b/crates/router/src/connector/gocardless/transformers.rs index 63e199657af0..249dae370b1a 100644 --- a/crates/router/src/connector/gocardless/transformers.rs +++ b/crates/router/src/connector/gocardless/transformers.rs @@ -577,6 +577,7 @@ impl response: Ok(types::PaymentsResponseData::TransactionResponse { connector_metadata: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, resource_id: ResponseId::NoResponseId, redirection_data: None, mandate_reference, @@ -732,6 +733,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -766,6 +768,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/helcim/transformers.rs b/crates/router/src/connector/helcim/transformers.rs index 9f405e2e2ea1..dc38b2eeb253 100644 --- a/crates/router/src/connector/helcim/transformers.rs +++ b/crates/router/src/connector/helcim/transformers.rs @@ -328,6 +328,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), status: enums::AttemptStatus::from(item.response), ..item.data @@ -382,6 +383,7 @@ impl connector_metadata, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), status: enums::AttemptStatus::from(item.response), ..item.data @@ -440,6 +442,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), status: 
enums::AttemptStatus::from(item.response), ..item.data @@ -526,6 +529,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), status: enums::AttemptStatus::from(item.response), ..item.data @@ -588,6 +592,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), status: enums::AttemptStatus::from(item.response), ..item.data diff --git a/crates/router/src/connector/iatapay/transformers.rs b/crates/router/src/connector/iatapay/transformers.rs index 7cdfafc858b6..b6d2dee4a01b 100644 --- a/crates/router/src/connector/iatapay/transformers.rs +++ b/crates/router/src/connector/iatapay/transformers.rs @@ -286,6 +286,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: connector_response_reference_id.clone(), + incremental_authorization_allowed: None, }), |checkout_methods| { Ok(types::PaymentsResponseData::TransactionResponse { @@ -299,6 +300,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: connector_response_reference_id.clone(), + incremental_authorization_allowed: None, }) }, ), diff --git a/crates/router/src/connector/klarna/transformers.rs b/crates/router/src/connector/klarna/transformers.rs index 563410ee99d0..0816dd82ec6b 100644 --- a/crates/router/src/connector/klarna/transformers.rs +++ b/crates/router/src/connector/klarna/transformers.rs @@ -167,6 +167,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.order_id.clone()), + incremental_authorization_allowed: None, }), status: item.response.fraud_status.into(), ..item.data diff --git a/crates/router/src/connector/mollie/transformers.rs b/crates/router/src/connector/mollie/transformers.rs index b77077ae709f..62fb94e236a8 100644 --- a/crates/router/src/connector/mollie/transformers.rs +++ b/crates/router/src/connector/mollie/transformers.rs @@ -531,6 +531,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.id), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/multisafepay/transformers.rs b/crates/router/src/connector/multisafepay/transformers.rs index 1780b77379c7..0a034724a629 100644 --- a/crates/router/src/connector/multisafepay/transformers.rs +++ b/crates/router/src/connector/multisafepay/transformers.rs @@ -262,10 +262,9 @@ impl TryFrom for Gateway { utils::CardIssuer::Visa => Ok(Self::Visa), utils::CardIssuer::DinersClub | utils::CardIssuer::JCB - | utils::CardIssuer::CarteBlanche => Err(errors::ConnectorError::NotSupported { - message: issuer.to_string(), - connector: "Multisafe pay", - } + | utils::CardIssuer::CarteBlanche => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Multisafe pay"), + ) .into()), } } @@ -694,6 +693,7 @@ impl connector_response_reference_id: Some( payment_response.data.order_id.clone(), ), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/nexinets/transformers.rs b/crates/router/src/connector/nexinets/transformers.rs index 15cbe9a7e28e..8875abdb7868 100644 --- a/crates/router/src/connector/nexinets/transformers.rs +++ b/crates/router/src/connector/nexinets/transformers.rs @@ -372,6 +372,7 @@ impl connector_metadata: Some(connector_metadata), network_txn_id: None, 
connector_response_reference_id: Some(item.response.order_id), + incremental_authorization_allowed: None, }), ..item.data }) @@ -455,6 +456,7 @@ impl connector_metadata: Some(connector_metadata), network_txn_id: None, connector_response_reference_id: Some(item.response.order.order_id), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/nmi/transformers.rs b/crates/router/src/connector/nmi/transformers.rs index ff3a1e6a1c54..35c0e102020e 100644 --- a/crates/router/src/connector/nmi/transformers.rs +++ b/crates/router/src/connector/nmi/transformers.rs @@ -322,6 +322,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), enums::AttemptStatus::CaptureInitiated, ), @@ -415,6 +416,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), enums::AttemptStatus::Charged, ), @@ -470,6 +472,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), if let Some(diesel_models::enums::CaptureMethod::Automatic) = item.data.request.capture_method @@ -519,6 +522,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), enums::AttemptStatus::VoidInitiated, ), @@ -570,6 +574,7 @@ impl TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/noon/transformers.rs b/crates/router/src/connector/noon/transformers.rs index ee3a8ba8c532..b478d63e0f12 100644 --- a/crates/router/src/connector/noon/transformers.rs +++ b/crates/router/src/connector/noon/transformers.rs @@ -527,6 +527,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id, + incremental_authorization_allowed: None, }) } }, diff --git a/crates/router/src/connector/nuvei/transformers.rs b/crates/router/src/connector/nuvei/transformers.rs index 36244b8bc0d8..73e039c63395 100644 --- a/crates/router/src/connector/nuvei/transformers.rs +++ b/crates/router/src/connector/nuvei/transformers.rs @@ -1452,6 +1452,7 @@ where }, network_txn_id: None, connector_response_reference_id: response.order_id, + incremental_authorization_allowed: None, }) }, ..item.data diff --git a/crates/router/src/connector/opayo/transformers.rs b/crates/router/src/connector/opayo/transformers.rs index 5e9fb066c78d..7b633f6aa641 100644 --- a/crates/router/src/connector/opayo/transformers.rs +++ b/crates/router/src/connector/opayo/transformers.rs @@ -123,6 +123,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.transaction_id), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/opennode/transformers.rs b/crates/router/src/connector/opennode/transformers.rs index 794fc8573417..7670166fabaf 100644 --- a/crates/router/src/connector/opennode/transformers.rs +++ b/crates/router/src/connector/opennode/transformers.rs @@ -150,6 +150,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: item.response.data.order_id, + incremental_authorization_allowed: None, }) } else { Ok(types::PaymentsResponseData::TransactionUnresolvedResponse { diff --git 
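Several hunks in this diff (Multisafepay above; Shift4 and Stax below) apply the same refactor: payment methods a connector has simply not integrated yet now return `NotImplemented` via the shared `utils::get_unimplemented_payment_method_error_message` helper, instead of `NotSupported`. A sketch of the pattern, with the helper's message body being a local stand-in rather than the repo's actual text:

```rust
#[derive(Debug)]
enum ConnectorError {
    NotImplemented(String),
}

// Stand-in for utils::get_unimplemented_payment_method_error_message.
fn get_unimplemented_payment_method_error_message(connector: &str) -> String {
    format!("Selected payment method through {connector}")
}

fn gateway_for_issuer(issuer: &str) -> Result<&'static str, ConnectorError> {
    match issuer {
        "Visa" => Ok("VISA"),
        // Card networks not yet integrated are NotImplemented, not NotSupported.
        _ => Err(ConnectorError::NotImplemented(
            get_unimplemented_payment_method_error_message("Multisafe pay"),
        )),
    }
}

fn main() {
    assert!(gateway_for_issuer("Visa").is_ok());
    assert!(gateway_for_issuer("JCB").is_err());
}
```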
a/crates/router/src/connector/payeezy/transformers.rs b/crates/router/src/connector/payeezy/transformers.rs index 90c58c3a9bce..0170d18ecb46 100644 --- a/crates/router/src/connector/payeezy/transformers.rs +++ b/crates/router/src/connector/payeezy/transformers.rs @@ -440,6 +440,7 @@ impl .reference .unwrap_or(item.response.transaction_id), ), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/payme/transformers.rs b/crates/router/src/connector/payme/transformers.rs index e751de20e219..e3d54881f1f2 100644 --- a/crates/router/src/connector/payme/transformers.rs +++ b/crates/router/src/connector/payme/transformers.rs @@ -262,6 +262,7 @@ impl TryFrom<&PaymePaySaleResponse> for types::PaymentsResponseData { ), network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }) } } @@ -326,6 +327,7 @@ impl From<&SaleQuery> for types::PaymentsResponseData { connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, } } } @@ -535,6 +537,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }), diff --git a/crates/router/src/connector/paypal.rs b/crates/router/src/connector/paypal.rs index 9ab19b295570..a0d391789020 100644 --- a/crates/router/src/connector/paypal.rs +++ b/crates/router/src/connector/paypal.rs @@ -570,42 +570,95 @@ impl .parse_struct("paypal PaypalPreProcessingResponse") .change_context(errors::ConnectorError::ResponseDeserializationFailed)?; - // permutation for status to continue payment - match ( - response - .payment_source - .card - .authentication_result - .three_d_secure - .enrollment_status - .as_ref(), - response - .payment_source - .card - .authentication_result - .three_d_secure - .authentication_status - .as_ref(), - response - .payment_source - .card - .authentication_result - .liability_shift - .clone(), - ) { - ( - Some(paypal::EnrollementStatus::Ready), - Some(paypal::AuthenticationStatus::Success), - paypal::LiabilityShift::Possible, - ) - | ( - Some(paypal::EnrollementStatus::Ready), - Some(paypal::AuthenticationStatus::Attempted), - paypal::LiabilityShift::Possible, - ) - | (Some(paypal::EnrollementStatus::NotReady), None, paypal::LiabilityShift::No) - | (Some(paypal::EnrollementStatus::Unavailable), None, paypal::LiabilityShift::No) - | (Some(paypal::EnrollementStatus::Bypassed), None, paypal::LiabilityShift::No) => { + match response { + // if card supports 3DS check for liability + paypal::PaypalPreProcessingResponse::PaypalLiabilityResponse(liability_response) => { + // permutation for status to continue payment + match ( + liability_response + .payment_source + .card + .authentication_result + .three_d_secure + .enrollment_status + .as_ref(), + liability_response + .payment_source + .card + .authentication_result + .three_d_secure + .authentication_status + .as_ref(), + liability_response + .payment_source + .card + .authentication_result + .liability_shift + .clone(), + ) { + ( + Some(paypal::EnrollementStatus::Ready), + Some(paypal::AuthenticationStatus::Success), + paypal::LiabilityShift::Possible, + ) + | ( + Some(paypal::EnrollementStatus::Ready), + Some(paypal::AuthenticationStatus::Attempted), + paypal::LiabilityShift::Possible, + ) + | (Some(paypal::EnrollementStatus::NotReady), None, paypal::LiabilityShift::No) + | (Some(paypal::EnrollementStatus::Unavailable), None, 
paypal::LiabilityShift::No) + | (Some(paypal::EnrollementStatus::Bypassed), None, paypal::LiabilityShift::No) => { + Ok(types::PaymentsPreProcessingRouterData { + status: storage_enums::AttemptStatus::AuthenticationSuccessful, + response: Ok(types::PaymentsResponseData::TransactionResponse { + resource_id: types::ResponseId::NoResponseId, + redirection_data: None, + mandate_reference: None, + connector_metadata: None, + network_txn_id: None, + connector_response_reference_id: None, + incremental_authorization_allowed: None, + }), + ..data.clone() + }) + } + _ => Ok(types::PaymentsPreProcessingRouterData { + response: Err(ErrorResponse { + attempt_status: Some(enums::AttemptStatus::Failure), + code: consts::NO_ERROR_CODE.to_string(), + message: consts::NO_ERROR_MESSAGE.to_string(), + connector_transaction_id: None, + reason: Some(format!("{} Connector Responsded with LiabilityShift: {:?}, EnrollmentStatus: {:?}, and AuthenticationStatus: {:?}", + consts::CANNOT_CONTINUE_AUTH, + liability_response + .payment_source + .card + .authentication_result + .liability_shift, + liability_response + .payment_source + .card + .authentication_result + .three_d_secure + .enrollment_status + .unwrap_or(paypal::EnrollementStatus::Null), + liability_response + .payment_source + .card + .authentication_result + .three_d_secure + .authentication_status + .unwrap_or(paypal::AuthenticationStatus::Null), + )), + status_code: res.status_code, + }), + ..data.clone() + }), + } + } + // if card does not supports 3DS check for liability + paypal::PaypalPreProcessingResponse::PaypalNonLiablityResponse(_) => { Ok(types::PaymentsPreProcessingRouterData { status: storage_enums::AttemptStatus::AuthenticationSuccessful, response: Ok(types::PaymentsResponseData::TransactionResponse { @@ -615,42 +668,11 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..data.clone() }) } - _ => Ok(types::PaymentsPreProcessingRouterData { - response: Err(ErrorResponse { - attempt_status: Some(enums::AttemptStatus::Failure), - code: consts::NO_ERROR_CODE.to_string(), - message: consts::NO_ERROR_MESSAGE.to_string(), - connector_transaction_id: None, - reason: Some(format!("{} Connector Responsded with LiabilityShift: {:?}, EnrollmentStatus: {:?}, and AuthenticationStatus: {:?}", - consts::CANNOT_CONTINUE_AUTH, - response - .payment_source - .card - .authentication_result - .liability_shift, - response - .payment_source - .card - .authentication_result - .three_d_secure - .enrollment_status - .unwrap_or(paypal::EnrollementStatus::Null), - response - .payment_source - .card - .authentication_result - .three_d_secure - .authentication_status - .unwrap_or(paypal::AuthenticationStatus::Null), - )), - status_code: res.status_code, - }), - ..data.clone() - }), } } diff --git a/crates/router/src/connector/paypal/transformers.rs b/crates/router/src/connector/paypal/transformers.rs index 04328cead233..8b6a2297d090 100644 --- a/crates/router/src/connector/paypal/transformers.rs +++ b/crates/router/src/connector/paypal/transformers.rs @@ -926,10 +926,22 @@ pub struct PaypalThreeDsResponse { } #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PaypalPreProcessingResponse { +#[serde(untagged)] +pub enum PaypalPreProcessingResponse { + PaypalLiabilityResponse(PaypalLiabilityResponse), + PaypalNonLiablityResponse(PaypalNonLiablityResponse), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PaypalLiabilityResponse { pub payment_source: 
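The PayPal pre-processing refactor above keeps the same 3DS gate, now inside the `PaypalLiabilityResponse` arm: the decision is a match over the tuple of enrollment status, authentication status, and liability shift, and only a few combinations let the payment continue. A sketch with the enums trimmed to the variants the allowed permutations use:

```rust
enum EnrollmentStatus { Ready, NotReady, Bypassed }
enum AuthenticationStatus { Success, Attempted }
enum LiabilityShift { Possible, No }

fn may_continue(
    enrollment: Option<EnrollmentStatus>,
    authentication: Option<AuthenticationStatus>,
    liability: LiabilityShift,
) -> bool {
    // Mirrors the permutation match in the hunk above.
    matches!(
        (enrollment, authentication, liability),
        (Some(EnrollmentStatus::Ready), Some(AuthenticationStatus::Success), LiabilityShift::Possible)
            | (Some(EnrollmentStatus::Ready), Some(AuthenticationStatus::Attempted), LiabilityShift::Possible)
            | (Some(EnrollmentStatus::NotReady), None, LiabilityShift::No)
            | (Some(EnrollmentStatus::Bypassed), None, LiabilityShift::No)
    )
}

fn main() {
    assert!(may_continue(
        Some(EnrollmentStatus::Ready),
        Some(AuthenticationStatus::Success),
        LiabilityShift::Possible,
    ));
    assert!(may_continue(
        Some(EnrollmentStatus::Ready),
        Some(AuthenticationStatus::Attempted),
        LiabilityShift::Possible,
    ));
    assert!(may_continue(Some(EnrollmentStatus::NotReady), None, LiabilityShift::No));
    assert!(!may_continue(Some(EnrollmentStatus::Bypassed), Some(AuthenticationStatus::Success), LiabilityShift::No));
}
```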
CardParams, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PaypalNonLiablityResponse { + payment_source: CardsData, +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CardParams { pub card: AuthResult, @@ -1174,6 +1186,7 @@ impl .invoice_id .clone() .or(Some(item.response.id)), + incremental_authorization_allowed: None, }), ..item.data }) @@ -1278,6 +1291,7 @@ impl connector_response_reference_id: Some( purchase_units.map_or(item.response.id, |item| item.invoice_id.clone()), ), + incremental_authorization_allowed: None, }), ..item.data }) @@ -1314,6 +1328,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -1363,6 +1378,7 @@ impl connector_metadata: Some(connector_meta), network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -1430,6 +1446,7 @@ impl .invoice_id .clone() .or(Some(item.response.supplementary_data.related_ids.order_id)), + incremental_authorization_allowed: None, }), ..item.data }) @@ -1531,6 +1548,7 @@ impl TryFrom> .response .invoice_id .or(Some(item.response.id)), + incremental_authorization_allowed: None, }), amount_captured: Some(amount_captured), ..item.data @@ -1581,6 +1599,7 @@ impl .response .invoice_id .or(Some(item.response.id)), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/payu/transformers.rs b/crates/router/src/connector/payu/transformers.rs index 9a2e14215c75..6edc570eb451 100644 --- a/crates/router/src/connector/payu/transformers.rs +++ b/crates/router/src/connector/payu/transformers.rs @@ -205,6 +205,7 @@ impl .response .ext_order_id .or(Some(item.response.order_id)), + incremental_authorization_allowed: None, }), amount_captured: None, ..item.data @@ -257,6 +258,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), amount_captured: None, ..item.data @@ -342,6 +344,7 @@ impl .response .ext_order_id .or(Some(item.response.order_id)), + incremental_authorization_allowed: None, }), amount_captured: None, ..item.data @@ -475,6 +478,7 @@ impl .ext_order_id .clone() .or(Some(order.order_id.clone())), + incremental_authorization_allowed: None, }), amount_captured: Some( order diff --git a/crates/router/src/connector/powertranz/transformers.rs b/crates/router/src/connector/powertranz/transformers.rs index a631a126ed3f..e0ecd81c7e58 100644 --- a/crates/router/src/connector/powertranz/transformers.rs +++ b/crates/router/src/connector/powertranz/transformers.rs @@ -328,6 +328,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.order_identifier), + incremental_authorization_allowed: None, }), Err, ); diff --git a/crates/router/src/connector/prophetpay/transformers.rs b/crates/router/src/connector/prophetpay/transformers.rs index d81b931edfc9..d05f2c3986a7 100644 --- a/crates/router/src/connector/prophetpay/transformers.rs +++ b/crates/router/src/connector/prophetpay/transformers.rs @@ -219,6 +219,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -407,6 +408,7 @@ impl connector_metadata, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -456,6 +458,7 @@ impl connector_metadata: 
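Turning `PaypalPreProcessingResponse` into a `#[serde(untagged)]` enum, as above, means serde tries each variant in declaration order: a payload carrying an `authentication_result` deserializes into the liability variant, anything else falls through to the non-liability one. A runnable sketch (requires `serde` and `serde_json`; the structs are trimmed to the minimum that distinguishes the variants, and struct variants stand in for the repo's tuple variants):

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct AuthenticationResult {
    liability_shift: String,
}

#[derive(Debug, Deserialize)]
struct LiabilityCard {
    authentication_result: AuthenticationResult,
}

#[derive(Debug, Deserialize)]
struct LiabilitySource {
    card: LiabilityCard,
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct NonLiabilitySource {
    card: serde_json::Value,
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PreProcessingResponse {
    Liability { payment_source: LiabilitySource },
    NonLiability { payment_source: NonLiabilitySource },
}

fn main() {
    let with_3ds = r#"{"payment_source":{"card":{"authentication_result":{"liability_shift":"POSSIBLE"}}}}"#;
    let without_3ds = r#"{"payment_source":{"card":{"brand":"VISA"}}}"#;
    if let PreProcessingResponse::Liability { payment_source } =
        serde_json::from_str::<PreProcessingResponse>(with_3ds).unwrap()
    {
        assert_eq!(payment_source.card.authentication_result.liability_shift, "POSSIBLE");
    } else {
        panic!("expected liability variant");
    }
    assert!(matches!(
        serde_json::from_str::<PreProcessingResponse>(without_3ds).unwrap(),
        PreProcessingResponse::NonLiability { .. }
    ));
}
```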
None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -505,6 +508,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/rapyd/transformers.rs b/crates/router/src/connector/rapyd/transformers.rs index 898b6ed6d147..193eb8198926 100644 --- a/crates/router/src/connector/rapyd/transformers.rs +++ b/crates/router/src/connector/rapyd/transformers.rs @@ -487,6 +487,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ) } diff --git a/crates/router/src/connector/shift4/transformers.rs b/crates/router/src/connector/shift4/transformers.rs index c272a5b6fc12..ce68aad25c50 100644 --- a/crates/router/src/connector/shift4/transformers.rs +++ b/crates/router/src/connector/shift4/transformers.rs @@ -168,10 +168,9 @@ impl TryFrom<&types::RouterData { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Shift4", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) .into()) } } @@ -184,13 +183,8 @@ impl TryFrom<&api_models::payments::WalletData> for Shift4PaymentMethod { match wallet_data { payments::WalletData::AliPayRedirect(_) | payments::WalletData::ApplePay(_) - | payments::WalletData::WeChatPayRedirect(_) => { - Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Shift4"), - ) - .into()) - } - payments::WalletData::AliPayQr(_) + | payments::WalletData::WeChatPayRedirect(_) + | payments::WalletData::AliPayQr(_) | payments::WalletData::AliPayHkRedirect(_) | payments::WalletData::MomoRedirect(_) | payments::WalletData::KakaoPayRedirect(_) @@ -212,10 +206,9 @@ impl TryFrom<&api_models::payments::WalletData> for Shift4PaymentMethod { | payments::WalletData::TouchNGoRedirect(_) | payments::WalletData::WeChatPayQr(_) | payments::WalletData::CashappQr(_) - | payments::WalletData::SwishQr(_) => Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Shift4", - } + | payments::WalletData::SwishQr(_) => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) .into()), } } @@ -227,13 +220,8 @@ impl TryFrom<&api_models::payments::BankTransferData> for Shift4PaymentMethod { bank_transfer_data: &api_models::payments::BankTransferData, ) -> Result { match bank_transfer_data { - payments::BankTransferData::MultibancoBankTransfer { .. } => { - Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Shift4"), - ) - .into()) - } - payments::BankTransferData::AchBankTransfer { .. } + payments::BankTransferData::MultibancoBankTransfer { .. } + | payments::BankTransferData::AchBankTransfer { .. } | payments::BankTransferData::SepaBankTransfer { .. } | payments::BankTransferData::BacsBankTransfer { .. } | payments::BankTransferData::PermataBankTransfer { .. } @@ -244,10 +232,9 @@ impl TryFrom<&api_models::payments::BankTransferData> for Shift4PaymentMethod { | payments::BankTransferData::DanamonVaBankTransfer { .. } | payments::BankTransferData::MandiriVaBankTransfer { .. 
} | payments::BankTransferData::Pix {} - | payments::BankTransferData::Pse {} => Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Shift4", - } + | payments::BankTransferData::Pse {} => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) .into()), } } @@ -257,11 +244,8 @@ impl TryFrom<&api_models::payments::VoucherData> for Shift4PaymentMethod { type Error = Error; fn try_from(voucher_data: &api_models::payments::VoucherData) -> Result { match voucher_data { - payments::VoucherData::Boleto(_) => Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Shift4"), - ) - .into()), - payments::VoucherData::Efecty + payments::VoucherData::Boleto(_) + | payments::VoucherData::Efecty | payments::VoucherData::PagoEfectivo | payments::VoucherData::RedCompra | payments::VoucherData::RedPagos @@ -273,10 +257,9 @@ impl TryFrom<&api_models::payments::VoucherData> for Shift4PaymentMethod { | payments::VoucherData::MiniStop(_) | payments::VoucherData::FamilyMart(_) | payments::VoucherData::Seicomart(_) - | payments::VoucherData::PayEasy(_) => Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Shift4", - } + | payments::VoucherData::PayEasy(_) => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) .into()), } } @@ -286,15 +269,12 @@ impl TryFrom<&api_models::payments::GiftCardData> for Shift4PaymentMethod { type Error = Error; fn try_from(gift_card_data: &api_models::payments::GiftCardData) -> Result { match gift_card_data { - payments::GiftCardData::Givex(_) => Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Shift4", + payments::GiftCardData::Givex(_) | payments::GiftCardData::PaySafeCard {} => { + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) + .into()) } - .into()), - payments::GiftCardData::PaySafeCard {} => Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Shift4"), - ) - .into()), } } } @@ -401,10 +381,9 @@ impl TryFrom<&types::RouterData Err(errors::ConnectorError::NotSupported { - message: "Flow".to_string(), - connector: "Shift4", - } + | None => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) .into()), } } @@ -421,13 +400,8 @@ impl TryFrom<&payments::BankRedirectData> for PaymentMethodType { payments::BankRedirectData::BancontactCard { .. } | payments::BankRedirectData::Blik { .. } | payments::BankRedirectData::Trustly { .. } - | payments::BankRedirectData::Przelewy24 { .. } => { - Err(errors::ConnectorError::NotImplemented( - utils::get_unimplemented_payment_method_error_message("Shift4"), - ) - .into()) - } - payments::BankRedirectData::Bizum {} + | payments::BankRedirectData::Przelewy24 { .. } + | payments::BankRedirectData::Bizum {} | payments::BankRedirectData::Interac { .. } | payments::BankRedirectData::OnlineBankingCzechRepublic { .. } | payments::BankRedirectData::OnlineBankingFinland { .. } @@ -436,10 +410,9 @@ impl TryFrom<&payments::BankRedirectData> for PaymentMethodType { | payments::BankRedirectData::OpenBankingUk { .. } | payments::BankRedirectData::OnlineBankingFpx { .. } | payments::BankRedirectData::OnlineBankingThailand { .. 
} => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Shift4", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Shift4"), + ) .into()) } } @@ -702,6 +675,7 @@ impl ), network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) @@ -743,6 +717,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.id), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/square/transformers.rs b/crates/router/src/connector/square/transformers.rs index 6024a20fa6ab..7343ef58bb08 100644 --- a/crates/router/src/connector/square/transformers.rs +++ b/crates/router/src/connector/square/transformers.rs @@ -401,6 +401,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: item.response.payment.reference_id, + incremental_authorization_allowed: None, }), amount_captured, ..item.data diff --git a/crates/router/src/connector/stax/transformers.rs b/crates/router/src/connector/stax/transformers.rs index bb37bf1fc9e7..2fd3b3474ea4 100644 --- a/crates/router/src/connector/stax/transformers.rs +++ b/crates/router/src/connector/stax/transformers.rs @@ -63,10 +63,9 @@ impl TryFrom<&StaxRouterData<&types::PaymentsAuthorizeRouterData>> for StaxPayme item: &StaxRouterData<&types::PaymentsAuthorizeRouterData>, ) -> Result { if item.router_data.request.currency != enums::Currency::USD { - Err(errors::ConnectorError::NotSupported { - message: item.router_data.request.currency.to_string(), - connector: "Stax", - })? + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Stax"), + ))? 
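// A minimal, self-contained sketch of the refactor running through the Shift4, Stax,
// and Volt hunks above (stand-in enums; only the helper name mirrors the real
// `connector::utils` function): variants that previously each built a `NotSupported`
// error are grouped with `|` into a single arm returning `NotImplemented`.
#[derive(Debug)]
enum ConnectorError {
    NotImplemented(String),
}

fn get_unimplemented_payment_method_error_message(connector: &str) -> String {
    // Assumed message shape, for illustration only.
    format!("Selected payment method through {connector}")
}

enum WalletData {
    ApplePay,
    AliPayRedirect,
    WeChatPayRedirect,
}

fn wallet_for_shift4(wallet: &WalletData) -> Result<(), ConnectorError> {
    match wallet {
        // One consolidated arm replaces several near-identical error arms.
        WalletData::ApplePay
        | WalletData::AliPayRedirect
        | WalletData::WeChatPayRedirect => Err(ConnectorError::NotImplemented(
            get_unimplemented_payment_method_error_message("Shift4"),
        )),
    }
}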
} let total = item.amount; @@ -119,10 +118,9 @@ impl TryFrom<&StaxRouterData<&types::PaymentsAuthorizeRouterData>> for StaxPayme | api::PaymentMethodData::GiftCard(_) | api::PaymentMethodData::CardRedirect(_) | api::PaymentMethodData::Upi(_) - | api::PaymentMethodData::CardToken(_) => Err(errors::ConnectorError::NotSupported { - message: "SELECTED_PAYMENT_METHOD".to_string(), - connector: "Stax", - })?, + | api::PaymentMethodData::CardToken(_) => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Stax"), + ))?, } } } @@ -270,10 +268,9 @@ impl TryFrom<&types::TokenizationRouterData> for StaxTokenRequest { | api::PaymentMethodData::GiftCard(_) | api::PaymentMethodData::CardRedirect(_) | api::PaymentMethodData::Upi(_) - | api::PaymentMethodData::CardToken(_) => Err(errors::ConnectorError::NotSupported { - message: "SELECTED_PAYMENT_METHOD".to_string(), - connector: "Stax", - })?, + | api::PaymentMethodData::CardToken(_) => Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Stax"), + ))?, } } } @@ -370,6 +367,7 @@ impl connector_response_reference_id: Some( item.response.idempotency_id.unwrap_or(item.response.id), ), + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/stripe/transformers.rs b/crates/router/src/connector/stripe/transformers.rs index ae7fe59be96c..182479604539 100644 --- a/crates/router/src/connector/stripe/transformers.rs +++ b/crates/router/src/connector/stripe/transformers.rs @@ -2334,6 +2334,7 @@ impl connector_metadata, network_txn_id, connector_response_reference_id: Some(item.response.id), + incremental_authorization_allowed: None, }), amount_captured: item.response.amount_received, ..item.data @@ -2494,6 +2495,7 @@ impl connector_metadata, network_txn_id: None, connector_response_reference_id: Some(item.response.id.clone()), + incremental_authorization_allowed: None, }), Err, ); @@ -2535,6 +2537,7 @@ impl connector_metadata: None, network_txn_id: Option::foreign_from(item.response.latest_attempt), connector_response_reference_id: Some(item.response.id), + incremental_authorization_allowed: None, }), ..item.data }) @@ -3076,6 +3079,7 @@ impl TryFrom (bool, &'static str) { true, "Transaction declined (maximum transaction frequency exceeded)", ), + "800.100.165" => (true, "Transaction declined (card lost)"), "800.100.168" => (true, "Transaction declined (restricted card)"), "800.100.170" => (true, "Transaction declined (transaction not permitted)"), "800.100.171" => (true, "transaction declined (pick up card)"), @@ -512,6 +513,10 @@ fn is_payment_failed(payment_status: &str) -> (bool, &'static str) { true, "Transaction for the same session is currently being processed, please try again later", ), + "900.100.100" => ( + true, + "Unexpected communication error with connector/acquirer", + ), "900.100.300" => (true, "Timeout, uncertain result"), _ => (false, ""), } @@ -729,6 +734,7 @@ fn handle_cards_response( connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payment_response_data)) } @@ -757,6 +763,7 @@ fn handle_bank_redirects_response( connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payment_response_data)) } @@ -789,6 +796,7 @@ fn handle_bank_redirects_error_response( connector_metadata: None, network_txn_id: None, 
connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payment_response_data)) } @@ -831,6 +839,7 @@ fn handle_bank_redirects_sync_response( connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, error, payment_response_data)) } @@ -853,6 +862,7 @@ pub fn handle_webhook_response( connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }; Ok((status, None, payment_response_data)) } diff --git a/crates/router/src/connector/tsys/transformers.rs b/crates/router/src/connector/tsys/transformers.rs index 863b754fc89c..8c9c6cd43df4 100644 --- a/crates/router/src/connector/tsys/transformers.rs +++ b/crates/router/src/connector/tsys/transformers.rs @@ -218,6 +218,7 @@ fn get_payments_response(connector_response: TsysResponse) -> types::PaymentsRes connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(connector_response.transaction_id), + incremental_authorization_allowed: None, } } @@ -241,6 +242,7 @@ fn get_payments_sync_response( .transaction_id .clone(), ), + incremental_authorization_allowed: None, } } diff --git a/crates/router/src/connector/volt/transformers.rs b/crates/router/src/connector/volt/transformers.rs index efed7c797c76..cea56feb7145 100644 --- a/crates/router/src/connector/volt/transformers.rs +++ b/crates/router/src/connector/volt/transformers.rs @@ -130,10 +130,9 @@ impl TryFrom<&VoltRouterData<&types::PaymentsAuthorizeRouterData>> for VoltPayme | api_models::payments::BankRedirectData::Trustly { .. } | api_models::payments::BankRedirectData::OnlineBankingFpx { .. } | api_models::payments::BankRedirectData::OnlineBankingThailand { .. 
} => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Volt", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Volt"), + ) .into()) } }, @@ -150,10 +149,9 @@ impl TryFrom<&VoltRouterData<&types::PaymentsAuthorizeRouterData>> for VoltPayme | api_models::payments::PaymentMethodData::Voucher(_) | api_models::payments::PaymentMethodData::GiftCard(_) | api_models::payments::PaymentMethodData::CardToken(_) => { - Err(errors::ConnectorError::NotSupported { - message: utils::SELECTED_PAYMENT_METHOD.to_string(), - connector: "Volt", - } + Err(errors::ConnectorError::NotImplemented( + utils::get_unimplemented_payment_method_error_message("Volt"), + ) .into()) } } @@ -286,6 +284,7 @@ impl connector_metadata: None, network_txn_id: None, connector_response_reference_id: Some(item.response.id), + incremental_authorization_allowed: None, }), ..item.data }) @@ -337,6 +336,7 @@ impl TryFrom TryFrom TryFrom> connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }), ..item.data }) diff --git a/crates/router/src/connector/zen/transformers.rs b/crates/router/src/connector/zen/transformers.rs index 64f6d5bf1a07..c66b098fe751 100644 --- a/crates/router/src/connector/zen/transformers.rs +++ b/crates/router/src/connector/zen/transformers.rs @@ -940,6 +940,7 @@ impl TryFrom TryFrom = CustomResult; pub type UserResponse = CustomResult, UserErrors>; +pub mod sample_data; #[derive(Debug, thiserror::Error)] pub enum UserErrors { @@ -31,6 +32,18 @@ pub enum UserErrors { DuplicateOrganizationId, #[error("MerchantIdNotFound")] MerchantIdNotFound, + #[error("MetadataAlreadySet")] + MetadataAlreadySet, + #[error("InvalidRoleId")] + InvalidRoleId, + #[error("InvalidRoleOperation")] + InvalidRoleOperation, + #[error("IpAddressParsingFailed")] + IpAddressParsingFailed, + #[error("InvalidMetadataRequest")] + InvalidMetadataRequest, + #[error("MerchantIdParsingError")] + MerchantIdParsingError, } impl common_utils::errors::ErrorSwitch for UserErrors { @@ -77,14 +90,38 @@ impl common_utils::errors::ErrorSwitch { AER::BadRequest(ApiError::new(sub_code, 16, "Invalid Email", None)) } + Self::MerchantIdNotFound => { + AER::BadRequest(ApiError::new(sub_code, 18, "Invalid Merchant ID", None)) + } + Self::MetadataAlreadySet => { + AER::BadRequest(ApiError::new(sub_code, 19, "Metadata already set", None)) + } Self::DuplicateOrganizationId => AER::InternalServerError(ApiError::new( sub_code, 21, "An Organization with the id already exists", None, )), - Self::MerchantIdNotFound => { - AER::BadRequest(ApiError::new(sub_code, 18, "Invalid Merchant ID", None)) + Self::InvalidRoleId => { + AER::BadRequest(ApiError::new(sub_code, 22, "Invalid Role ID", None)) + } + Self::InvalidRoleOperation => AER::BadRequest(ApiError::new( + sub_code, + 23, + "User Role Operation Not Supported", + None, + )), + Self::IpAddressParsingFailed => { + AER::InternalServerError(ApiError::new(sub_code, 24, "Something Went Wrong", None)) + } + Self::InvalidMetadataRequest => AER::BadRequest(ApiError::new( + sub_code, + 26, + "Invalid Metadata Request", + None, + )), + Self::MerchantIdParsingError => { + AER::BadRequest(ApiError::new(sub_code, 28, "Invalid Merchant Id", None)) } } } diff --git a/crates/router/src/core/errors/user/sample_data.rs b/crates/router/src/core/errors/user/sample_data.rs new file mode 100644 index 000000000000..11233b27b5cd --- /dev/null +++ 
b/crates/router/src/core/errors/user/sample_data.rs @@ -0,0 +1,73 @@ +use api_models::errors::types::{ApiError, ApiErrorResponse}; +use common_utils::errors::{CustomResult, ErrorSwitch, ErrorSwitchFrom}; +use data_models::errors::StorageError; + +pub type SampleDataResult = CustomResult; + +#[derive(Debug, Clone, serde::Serialize, thiserror::Error)] +pub enum SampleDataError { + #[error["Internal Server Error"]] + InternalServerError, + #[error("Data Does Not Exist")] + DataDoesNotExist, + #[error("Server Error")] + DatabaseError, + #[error("Merchant Id Not Found")] + MerchantIdNotFound, + #[error("Invalid Parameters")] + InvalidParameters, + #[error["Invalid Records"]] + InvalidRange, +} + +impl ErrorSwitch for SampleDataError { + fn switch(&self) -> ApiErrorResponse { + match self { + Self::InternalServerError => ApiErrorResponse::InternalServerError(ApiError::new( + "SD", + 0, + "Something went wrong", + None, + )), + Self::DatabaseError => ApiErrorResponse::InternalServerError(ApiError::new( + "SD", + 1, + "Server Error(DB is down)", + None, + )), + Self::DataDoesNotExist => ApiErrorResponse::NotFound(ApiError::new( + "SD", + 2, + "Sample Data not present for given request", + None, + )), + Self::MerchantIdNotFound => ApiErrorResponse::BadRequest(ApiError::new( + "SD", + 3, + "Merchant ID not provided", + None, + )), + Self::InvalidParameters => ApiErrorResponse::BadRequest(ApiError::new( + "SD", + 4, + "Invalid parameters to generate Sample Data", + None, + )), + Self::InvalidRange => ApiErrorResponse::BadRequest(ApiError::new( + "SD", + 5, + "Records to be generated should be between range 10 and 100", + None, + )), + } + } +} + +impl ErrorSwitchFrom for SampleDataError { + fn switch_from(error: &StorageError) -> Self { + match matches!(error, StorageError::ValueNotFound(_)) { + true => Self::DataDoesNotExist, + false => Self::DatabaseError, + } + } +} diff --git a/crates/router/src/core/payment_methods.rs b/crates/router/src/core/payment_methods.rs index 1049137a9470..a2dbfb1480c4 100644 --- a/crates/router/src/core/payment_methods.rs +++ b/crates/router/src/core/payment_methods.rs @@ -42,7 +42,6 @@ pub trait PaymentMethodRetrieve { key_store: &domain::MerchantKeyStore, token: &storage::PaymentTokenData, payment_intent: &PaymentIntent, - card_cvc: Option>, card_token_data: Option<&CardToken>, ) -> RouterResult>; } @@ -126,7 +125,6 @@ impl PaymentMethodRetrieve for Oss { merchant_key_store: &domain::MerchantKeyStore, token_data: &storage::PaymentTokenData, payment_intent: &PaymentIntent, - card_cvc: Option>, card_token_data: Option<&CardToken>, ) -> RouterResult> { match token_data { @@ -135,7 +133,6 @@ impl PaymentMethodRetrieve for Oss { state, &generic_token.token, payment_intent, - card_cvc, merchant_key_store, card_token_data, ) @@ -147,7 +144,6 @@ impl PaymentMethodRetrieve for Oss { state, &generic_token.token, payment_intent, - card_cvc, merchant_key_store, card_token_data, ) @@ -159,7 +155,6 @@ impl PaymentMethodRetrieve for Oss { state, &card_token.token, payment_intent, - card_cvc, card_token_data, ) .await @@ -171,7 +166,6 @@ impl PaymentMethodRetrieve for Oss { state, &card_token.token, payment_intent, - card_cvc, card_token_data, ) .await diff --git a/crates/router/src/core/payment_methods/cards.rs b/crates/router/src/core/payment_methods/cards.rs index 044e270a7ea9..545733e298ab 100644 --- a/crates/router/src/core/payment_methods/cards.rs +++ b/crates/router/src/core/payment_methods/cards.rs @@ -225,12 +225,21 @@ pub async fn add_card_to_locker( ) .await 
.map_err(|error| { - metrics::CARD_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::CARD_LOCKER_FAILURES.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "basilisk"), + router_env::opentelemetry::KeyValue::new("operation", "add"), + ], + ); error }) }, &metrics::CARD_ADD_TIME, - &[], + &[router_env::opentelemetry::KeyValue::new( + "locker", "basilisk", + )], ) .await?; logger::debug!("card added to basilisk locker"); @@ -248,22 +257,45 @@ pub async fn add_card_to_locker( ) .await .map_err(|error| { - metrics::CARD_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::CARD_LOCKER_FAILURES.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "rust"), + router_env::opentelemetry::KeyValue::new("operation", "add"), + ], + ); error }) }, &metrics::CARD_ADD_TIME, - &[], + &[router_env::opentelemetry::KeyValue::new("locker", "rust")], ) .await; match add_card_to_rs_resp { value @ Ok(_) => { - logger::debug!("Card added successfully"); + logger::debug!("card added to rust locker"); + let _ = &metrics::CARD_LOCKER_SUCCESSFUL_RESPONSE.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "rust"), + router_env::opentelemetry::KeyValue::new("operation", "add"), + ], + ); value } Err(err) => { - logger::debug!(error =? err,"failed to add card"); + logger::debug!(error =? err,"failed to add card to rust locker"); + let _ = &metrics::CARD_LOCKER_SUCCESSFUL_RESPONSE.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "basilisk"), + router_env::opentelemetry::KeyValue::new("operation", "add"), + ], + ); Ok(add_card_to_hs_resp) } } @@ -290,12 +322,19 @@ pub async fn get_card_from_locker( .change_context(errors::ApiErrorResponse::InternalServerError) .attach_printable("Failed while getting card from basilisk_hs") .map_err(|error| { - metrics::CARD_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::CARD_LOCKER_FAILURES.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "rust"), + router_env::opentelemetry::KeyValue::new("operation", "get"), + ], + ); error }) }, &metrics::CARD_GET_TIME, - &[], + &[router_env::opentelemetry::KeyValue::new("locker", "rust")], ) .await; @@ -313,20 +352,45 @@ pub async fn get_card_from_locker( .change_context(errors::ApiErrorResponse::InternalServerError) .attach_printable("Failed while getting card from basilisk_hs") .map_err(|error| { - metrics::CARD_LOCKER_FAILURES.add(&metrics::CONTEXT, 1, &[]); + metrics::CARD_LOCKER_FAILURES.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "basilisk"), + router_env::opentelemetry::KeyValue::new("operation", "get"), + ], + ); error }) }, &metrics::CARD_GET_TIME, - &[], + &[router_env::opentelemetry::KeyValue::new( + "locker", "basilisk", + )], ) .await .map(|inner_card| { logger::debug!("card retrieved from basilisk locker"); + let _ = &metrics::CARD_LOCKER_SUCCESSFUL_RESPONSE.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "basilisk"), + router_env::opentelemetry::KeyValue::new("operation", "get"), + ], + ); inner_card }), Ok(_) => { logger::debug!("card retrieved from rust locker"); + let _ = &metrics::CARD_LOCKER_SUCCESSFUL_RESPONSE.add( + &metrics::CONTEXT, + 1, + &[ + router_env::opentelemetry::KeyValue::new("locker", "rust"), + router_env::opentelemetry::KeyValue::new("operation", "get"), + ], + ); get_card_from_rs_locker_resp } } diff --git 
a/crates/router/src/core/payments.rs b/crates/router/src/core/payments.rs index db83dce487a6..33afa29397e1 100644 --- a/crates/router/src/core/payments.rs +++ b/crates/router/src/core/payments.rs @@ -1554,7 +1554,7 @@ fn check_apple_pay_metadata( }) }) .map_err( - |error| logger::error!(%error, "Failed to Parse Value to ApplepaySessionTokenData"), + |error| logger::warn!(%error, "Failed to Parse Value to ApplepaySessionTokenData"), ); parsed_metadata.ok().map(|metadata| match metadata { diff --git a/crates/router/src/core/payments/helpers.rs b/crates/router/src/core/payments/helpers.rs index 266792f98758..0cce91bebeeb 100644 --- a/crates/router/src/core/payments/helpers.rs +++ b/crates/router/src/core/payments/helpers.rs @@ -1354,7 +1354,6 @@ pub async fn retrieve_payment_method_with_temporary_token( state: &AppState, token: &str, payment_intent: &PaymentIntent, - card_cvc: Option>, merchant_key_store: &domain::MerchantKeyStore, card_token_data: Option<&CardToken>, ) -> RouterResult> { @@ -1381,23 +1380,27 @@ pub async fn retrieve_payment_method_with_temporary_token( let name_on_card = if card.card_holder_name.clone().expose().is_empty() { card_token_data - .and_then(|token_data| { + .and_then(|token_data| token_data.card_holder_name.clone()) + .filter(|name_on_card| !name_on_card.clone().expose().is_empty()) + .map(|name_on_card| { is_card_updated = true; - token_data.card_holder_name.clone() + name_on_card }) - .filter(|name_on_card| !name_on_card.clone().expose().is_empty()) - .ok_or(errors::ApiErrorResponse::MissingRequiredField { - field_name: "card_holder_name", - })? } else { - card.card_holder_name.clone() + Some(card.card_holder_name.clone()) }; - updated_card.card_holder_name = name_on_card; - if let Some(cvc) = card_cvc { - is_card_updated = true; - updated_card.card_cvc = cvc; + if let Some(name_on_card) = name_on_card { + updated_card.card_holder_name = name_on_card; + } + + if let Some(token_data) = card_token_data { + if let Some(cvc) = token_data.card_cvc.clone() { + is_card_updated = true; + updated_card.card_cvc = cvc; + } } + if is_card_updated { let updated_pm = api::PaymentMethodData::Card(updated_card); vault::Vault::store_payment_method_data_in_locker( @@ -1443,7 +1446,6 @@ pub async fn retrieve_card_with_permanent_token( state: &AppState, token: &str, payment_intent: &PaymentIntent, - card_cvc: Option>, card_token_data: Option<&CardToken>, ) -> RouterResult { let customer_id = payment_intent @@ -1478,7 +1480,11 @@ pub async fn retrieve_card_with_permanent_token( card_holder_name: name_on_card.unwrap_or(masking::Secret::from("".to_string())), card_exp_month: card.card_exp_month, card_exp_year: card.card_exp_year, - card_cvc: card_cvc.unwrap_or_default(), + card_cvc: card_token_data + .cloned() + .unwrap_or_default() + .card_cvc + .unwrap_or_default(), card_issuer: card.card_brand, nick_name: card.nick_name.map(masking::Secret::new), card_network: None, @@ -1500,6 +1506,22 @@ pub async fn make_pm_data<'a, F: Clone, R, Ctx: PaymentMethodRetrieve>( Option, )> { let request = &payment_data.payment_method_data.clone(); + + let mut card_token_data = payment_data + .payment_method_data + .clone() + .and_then(|pmd| match pmd { + api_models::payments::PaymentMethodData::CardToken(token_data) => Some(token_data), + _ => None, + }) + .or(Some(CardToken::default())); + + if let Some(cvc) = payment_data.card_cvc.clone() { + if let Some(token_data) = card_token_data.as_mut() { + token_data.card_cvc = Some(cvc); + } + } + let token = payment_data.token.clone(); let 
hyperswitch_token = match payment_data.mandate_id { @@ -1559,13 +1581,6 @@ pub async fn make_pm_data<'a, F: Clone, R, Ctx: PaymentMethodRetrieve>( } }; - let card_cvc = payment_data.card_cvc.clone(); - - let card_token_data = request.as_ref().and_then(|pmd| match pmd { - api_models::payments::PaymentMethodData::CardToken(token_data) => Some(token_data), - _ => None, - }); - // TODO: Handle case where payment method and token both are present in request properly. let payment_method = match (request, hyperswitch_token) { (_, Some(hyperswitch_token)) => { @@ -1574,8 +1589,7 @@ pub async fn make_pm_data<'a, F: Clone, R, Ctx: PaymentMethodRetrieve>( merchant_key_store, &hyperswitch_token, &payment_data.payment_intent, - card_cvc, - card_token_data, + card_token_data.as_ref(), ) .await .attach_printable("in 'make_pm_data'")?; @@ -2569,6 +2583,9 @@ mod tests { payment_confirm_source: None, surcharge_applicable: None, updated_by: storage_enums::MerchantStorageScheme::PostgresOnly.to_string(), + request_incremental_authorization: + common_enums::RequestIncrementalAuthorization::default(), + incremental_authorization_allowed: None, }; let req_cs = Some("1".to_string()); let merchant_fulfillment_time = Some(900); @@ -2619,6 +2636,9 @@ mod tests { payment_confirm_source: None, surcharge_applicable: None, updated_by: storage_enums::MerchantStorageScheme::PostgresOnly.to_string(), + request_incremental_authorization: + common_enums::RequestIncrementalAuthorization::default(), + incremental_authorization_allowed: None, }; let req_cs = Some("1".to_string()); let merchant_fulfillment_time = Some(10); @@ -2669,6 +2689,9 @@ mod tests { payment_confirm_source: None, surcharge_applicable: None, updated_by: storage_enums::MerchantStorageScheme::PostgresOnly.to_string(), + request_incremental_authorization: + common_enums::RequestIncrementalAuthorization::default(), + incremental_authorization_allowed: None, }; let req_cs = Some("1".to_string()); let merchant_fulfillment_time = Some(10); diff --git a/crates/router/src/core/payments/operations/payment_cancel.rs b/crates/router/src/core/payments/operations/payment_cancel.rs index d4605b47c438..ae7810971896 100644 --- a/crates/router/src/core/payments/operations/payment_cancel.rs +++ b/crates/router/src/core/payments/operations/payment_cancel.rs @@ -212,6 +212,7 @@ impl let payment_intent_update = storage::PaymentIntentUpdate::PGStatusUpdate { status: enums::IntentStatus::Cancelled, updated_by: storage_scheme.to_string(), + incremental_authorization_allowed: None, }; (Some(payment_intent_update), enums::AttemptStatus::Voided) } else { diff --git a/crates/router/src/core/payments/operations/payment_confirm.rs b/crates/router/src/core/payments/operations/payment_confirm.rs index 28b6dbec96ab..d718db79a6d0 100644 --- a/crates/router/src/core/payments/operations/payment_confirm.rs +++ b/crates/router/src/core/payments/operations/payment_confirm.rs @@ -419,6 +419,15 @@ impl .attach_printable("Error converting feature_metadata to Value")? 
.or(payment_intent.feature_metadata); payment_intent.metadata = request.metadata.clone().or(payment_intent.metadata); + payment_intent.request_incremental_authorization = request + .request_incremental_authorization + .map(|request_incremental_authorization| { + core_utils::get_request_incremental_authorization_value( + Some(request_incremental_authorization), + payment_attempt.capture_method, + ) + }) + .unwrap_or(Ok(payment_intent.request_incremental_authorization))?; payment_attempt.business_sub_label = request .business_sub_label .clone() diff --git a/crates/router/src/core/payments/operations/payment_create.rs b/crates/router/src/core/payments/operations/payment_create.rs index c12f28e23390..ac387076d1d1 100644 --- a/crates/router/src/core/payments/operations/payment_create.rs +++ b/crates/router/src/core/payments/operations/payment_create.rs @@ -713,6 +713,12 @@ impl PaymentCreate { let payment_link_id = payment_link_data.map(|pl_data| pl_data.payment_link_id); + let request_incremental_authorization = + core_utils::get_request_incremental_authorization_value( + request.request_incremental_authorization, + request.capture_method, + )?; + Ok(storage::PaymentIntentNew { payment_id: payment_id.to_string(), merchant_id: merchant_account.merchant_id.to_string(), @@ -749,6 +755,8 @@ impl PaymentCreate { payment_confirm_source: None, surcharge_applicable: None, updated_by: merchant_account.storage_scheme.to_string(), + request_incremental_authorization, + incremental_authorization_allowed: None, }) } diff --git a/crates/router/src/core/payments/operations/payment_response.rs b/crates/router/src/core/payments/operations/payment_response.rs index 2de5df38dba4..9781ad651ee2 100644 --- a/crates/router/src/core/payments/operations/payment_response.rs +++ b/crates/router/src/core/payments/operations/payment_response.rs @@ -418,8 +418,18 @@ async fn payment_response_update_tracker( redirection_data, connector_metadata, connector_response_reference_id, + incremental_authorization_allowed, .. 
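// The connector-reported `incremental_authorization_allowed` destructured here is
// reconciled against the intent's `request_incremental_authorization`: it is forced
// to `Some(false)` when the merchant explicitly opted out, and passed through
// unchanged otherwise (see `get_incremental_authorization_allowed_value` in core/utils.rs).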
} => { + payment_data + .payment_intent + .incremental_authorization_allowed = + core_utils::get_incremental_authorization_allowed_value( + incremental_authorization_allowed, + payment_data + .payment_intent + .request_incremental_authorization, + ); let connector_transaction_id = match resource_id { types::ResponseId::NoResponseId => None, types::ResponseId::ConnectorTransactionId(id) @@ -627,6 +637,7 @@ async fn payment_response_update_tracker( payment_data.payment_attempt.status, ), updated_by: storage_scheme.to_string(), + incremental_authorization_allowed: Some(false), }, Ok(_) => storage::PaymentIntentUpdate::ResponseUpdate { status: api_models::enums::IntentStatus::foreign_from( @@ -635,6 +646,9 @@ async fn payment_response_update_tracker( return_url: router_data.return_url.clone(), amount_captured, updated_by: storage_scheme.to_string(), + incremental_authorization_allowed: payment_data + .payment_intent + .incremental_authorization_allowed, }, }; diff --git a/crates/router/src/core/payments/routing.rs b/crates/router/src/core/payments/routing.rs index 841b48b9444a..96cd65615199 100644 --- a/crates/router/src/core/payments/routing.rs +++ b/crates/router/src/core/payments/routing.rs @@ -523,8 +523,10 @@ pub async fn refresh_kgraph_cache( .await .change_context(errors::RoutingError::KgraphCacheRefreshFailed)?; - merchant_connector_accounts - .retain(|mca| mca.connector_type != storage_enums::ConnectorType::PaymentVas); + merchant_connector_accounts.retain(|mca| { + mca.connector_type != storage_enums::ConnectorType::PaymentVas + && mca.connector_type != storage_enums::ConnectorType::PaymentMethodAuth + }); #[cfg(feature = "business_profile_routing")] let merchant_connector_accounts = payments_oss::helpers::filter_mca_based_on_business_profile( diff --git a/crates/router/src/core/payments/transformers.rs b/crates/router/src/core/payments/transformers.rs index 000bbb0fc00b..51e139c97988 100644 --- a/crates/router/src/core/payments/transformers.rs +++ b/crates/router/src/core/payments/transformers.rs @@ -1,6 +1,7 @@ use std::{fmt::Debug, marker::PhantomData, str::FromStr}; use api_models::payments::{FrmMessage, RequestSurchargeDetails}; +use common_enums::RequestIncrementalAuthorization; use common_utils::{consts::X_HS_LATENCY, fp_utils}; use diesel_models::ephemeral_key; use error_stack::{IntoReport, ResultExt}; @@ -80,6 +81,7 @@ where connector_metadata: None, network_txn_id: None, connector_response_reference_id: None, + incremental_authorization_allowed: None, }); let additional_data = PaymentAdditionalData { @@ -687,6 +689,9 @@ where .set_merchant_connector_id(payment_attempt.merchant_connector_id) .set_unified_code(payment_attempt.unified_code) .set_unified_message(payment_attempt.unified_message) + .set_incremental_authorization_allowed( + payment_intent.incremental_authorization_allowed, + ) .to_owned(), headers, )) @@ -749,6 +754,7 @@ where surcharge_details, unified_code: payment_attempt.unified_code, unified_message: payment_attempt.unified_message, + incremental_authorization_allowed: payment_intent.incremental_authorization_allowed, ..Default::default() }, headers, @@ -1036,6 +1042,12 @@ impl TryFrom> for types::PaymentsAuthoriz complete_authorize_url, customer_id: None, surcharge_details: payment_data.surcharge_details, + request_incremental_authorization: matches!( + payment_data + .payment_intent + .request_incremental_authorization, + RequestIncrementalAuthorization::True | RequestIncrementalAuthorization::Default + ), }) } } @@ -1274,6 +1286,12 @@ impl TryFrom> for 
types::SetupMandateRequ return_url: payment_data.payment_intent.return_url, browser_info, payment_method_type: attempt.payment_method_type, + request_incremental_authorization: matches!( + payment_data + .payment_intent + .request_incremental_authorization, + RequestIncrementalAuthorization::True | RequestIncrementalAuthorization::Default + ), }) } } diff --git a/crates/router/src/core/refunds.rs b/crates/router/src/core/refunds.rs index 2d572cee9513..c43c00b7259c 100644 --- a/crates/router/src/core/refunds.rs +++ b/crates/router/src/core/refunds.rs @@ -211,7 +211,10 @@ pub async fn trigger_refund_to_gateway( errors::ConnectorError::NotImplemented(message) => { Some(storage::RefundUpdate::ErrorUpdate { refund_status: Some(enums::RefundStatus::Failure), - refund_error_message: Some(message.to_string()), + refund_error_message: Some( + errors::ConnectorError::NotImplemented(message.to_owned()) + .to_string(), + ), refund_error_code: Some("NOT_IMPLEMENTED".to_string()), updated_by: storage_scheme.to_string(), }) @@ -927,7 +930,9 @@ pub async fn start_refund_workflow( ) -> Result<(), errors::ProcessTrackerError> { match refund_tracker.name.as_deref() { Some("EXECUTE_REFUND") => trigger_refund_execute_workflow(state, refund_tracker).await, - Some("SYNC_REFUND") => sync_refund_with_gateway_workflow(state, refund_tracker).await, + Some("SYNC_REFUND") => { + Box::pin(sync_refund_with_gateway_workflow(state, refund_tracker)).await + } _ => Err(errors::ProcessTrackerError::JobNotFound), } } diff --git a/crates/router/src/core/user.rs b/crates/router/src/core/user.rs index 94cd482a2291..7d0d599cc4ed 100644 --- a/crates/router/src/core/user.rs +++ b/crates/router/src/core/user.rs @@ -1,5 +1,5 @@ -use api_models::user as api; -use diesel_models::enums::UserStatus; +use api_models::user as user_api; +use diesel_models::{enums::UserStatus, user as storage_user}; use error_stack::{IntoReport, ResultExt}; use masking::{ExposeInterface, Secret}; use router_env::env; @@ -9,14 +9,19 @@ use crate::{ consts, db::user::UserInterface, routes::AppState, - services::{authentication::UserFromToken, ApplicationResponse}, + services::{authentication as auth, ApplicationResponse}, types::domain, + utils, }; +#[cfg(feature = "dummy_connector")] +pub mod sample_data; + +pub mod dashboard_metadata; pub async fn connect_account( state: AppState, - request: api::ConnectAccountRequest, -) -> UserResponse { + request: user_api::ConnectAccountRequest, +) -> UserResponse { let find_user = state .store .find_user_by_email(request.email.clone().expose().expose().as_str()) @@ -32,15 +37,17 @@ pub async fn connect_account( .get_jwt_auth_token(state.clone(), user_role.org_id) .await?; - return Ok(ApplicationResponse::Json(api::ConnectAccountResponse { - token: Secret::new(jwt_token), - merchant_id: user_role.merchant_id, - name: user_from_db.get_name(), - email: user_from_db.get_email(), - verification_days_left: None, - user_role: user_role.role_id, - user_id: user_from_db.get_user_id().to_string(), - })); + return Ok(ApplicationResponse::Json( + user_api::ConnectAccountResponse { + token: Secret::new(jwt_token), + merchant_id: user_role.merchant_id, + name: user_from_db.get_name(), + email: user_from_db.get_email(), + verification_days_left: None, + user_role: user_role.role_id, + user_id: user_from_db.get_user_id().to_string(), + }, + )); } else if find_user .map_err(|e| e.current_context().is_db_not_found()) .err() @@ -62,7 +69,7 @@ pub async fn connect_account( let user_role = new_user .insert_user_role_in_db( state.clone(), - 
consts::ROLE_ID_ORGANIZATION_ADMIN.to_string(), + consts::user_role::ROLE_ID_ORGANIZATION_ADMIN.to_string(), UserStatus::Active, ) .await?; @@ -70,15 +77,40 @@ pub async fn connect_account( .get_jwt_auth_token(state.clone(), user_role.org_id) .await?; - return Ok(ApplicationResponse::Json(api::ConnectAccountResponse { - token: Secret::new(jwt_token), - merchant_id: user_role.merchant_id, - name: user_from_db.get_name(), - email: user_from_db.get_email(), - verification_days_left: None, - user_role: user_role.role_id, - user_id: user_from_db.get_user_id().to_string(), - })); + #[cfg(feature = "email")] + { + use router_env::logger; + + use crate::services::email::types as email_types; + + let email_contents = email_types::VerifyEmail { + recipient_email: domain::UserEmail::from_pii_email(user_from_db.get_email())?, + settings: state.conf.clone(), + subject: "Welcome to the Hyperswitch community!", + }; + + let send_email_result = state + .email_client + .compose_and_send_email( + Box::new(email_contents), + state.conf.proxy.https_url.as_ref(), + ) + .await; + + logger::info!(?send_email_result); + } + + return Ok(ApplicationResponse::Json( + user_api::ConnectAccountResponse { + token: Secret::new(jwt_token), + merchant_id: user_role.merchant_id, + name: user_from_db.get_name(), + email: user_from_db.get_email(), + verification_days_left: None, + user_role: user_role.role_id, + user_id: user_from_db.get_user_id().to_string(), + }, + )); } else { Err(UserErrors::InternalServerError.into()) } @@ -86,8 +118,8 @@ pub async fn connect_account( pub async fn change_password( state: AppState, - request: api::ChangePasswordRequest, - user_from_token: UserFromToken, + request: user_api::ChangePasswordRequest, + user_from_token: auth::UserFromToken, ) -> UserResponse<()> { let user: domain::UserFromStorage = UserInterface::find_user_by_id(&*state.store, &user_from_token.user_id) @@ -115,3 +147,206 @@ pub async fn change_password( Ok(ApplicationResponse::StatusOk) } + +pub async fn create_internal_user( + state: AppState, + request: user_api::CreateInternalUserRequest, +) -> UserResponse<()> { + let new_user = domain::NewUser::try_from(request)?; + + let mut store_user: storage_user::UserNew = new_user.clone().try_into()?; + store_user.set_is_verified(true); + + let key_store = state + .store + .get_merchant_key_store_by_merchant_id( + consts::user_role::INTERNAL_USER_MERCHANT_ID, + &state.store.get_master_key().to_vec().into(), + ) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + e.change_context(UserErrors::MerchantIdNotFound) + } else { + e.change_context(UserErrors::InternalServerError) + } + })?; + + state + .store + .find_merchant_account_by_merchant_id( + consts::user_role::INTERNAL_USER_MERCHANT_ID, + &key_store, + ) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + e.change_context(UserErrors::MerchantIdNotFound) + } else { + e.change_context(UserErrors::InternalServerError) + } + })?; + + state + .store + .insert_user(store_user) + .await + .map_err(|e| { + if e.current_context().is_db_unique_violation() { + e.change_context(UserErrors::UserExists) + } else { + e.change_context(UserErrors::InternalServerError) + } + }) + .map(domain::user::UserFromStorage::from)?; + + new_user + .insert_user_role_in_db( + state, + consts::user_role::ROLE_ID_INTERNAL_VIEW_ONLY_USER.to_string(), + UserStatus::Active, + ) + .await?; + + Ok(ApplicationResponse::StatusOk) +} + +pub async fn switch_merchant_id( + state: AppState, + request: 
user_api::SwitchMerchantIdRequest, + user_from_token: auth::UserFromToken, +) -> UserResponse { + if !utils::user_role::is_internal_role(&user_from_token.role_id) { + let merchant_list = + utils::user_role::get_merchant_ids_for_user(state.clone(), &user_from_token.user_id) + .await?; + if !merchant_list.contains(&request.merchant_id) { + return Err(UserErrors::InvalidRoleOperation.into()) + .attach_printable("User doesn't have access to switch"); + } + } + + if user_from_token.merchant_id == request.merchant_id { + return Err(UserErrors::InvalidRoleOperation.into()) + .attach_printable("User switch to same merchant id."); + } + + let user = state + .store + .find_user_by_id(&user_from_token.user_id) + .await + .change_context(UserErrors::InternalServerError)?; + + let key_store = state + .store + .get_merchant_key_store_by_merchant_id( + request.merchant_id.as_str(), + &state.store.get_master_key().to_vec().into(), + ) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + e.change_context(UserErrors::MerchantIdNotFound) + } else { + e.change_context(UserErrors::InternalServerError) + } + })?; + + let org_id = state + .store + .find_merchant_account_by_merchant_id(request.merchant_id.as_str(), &key_store) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + e.change_context(UserErrors::MerchantIdNotFound) + } else { + e.change_context(UserErrors::InternalServerError) + } + })? + .organization_id; + + let user = domain::UserFromStorage::from(user); + let user_role = state + .store + .find_user_role_by_user_id(user.get_user_id()) + .await + .change_context(UserErrors::InternalServerError)?; + + let token = Box::pin(user.get_jwt_auth_token_with_custom_merchant_id( + state.clone(), + request.merchant_id.clone(), + org_id, + )) + .await? 
+ .into(); + + Ok(ApplicationResponse::Json( + user_api::ConnectAccountResponse { + merchant_id: request.merchant_id, + token, + name: user.get_name(), + email: user.get_email(), + user_id: user.get_user_id().to_string(), + verification_days_left: None, + user_role: user_role.role_id, + }, + )) +} + +pub async fn create_merchant_account( + state: AppState, + user_from_token: auth::UserFromToken, + req: user_api::UserMerchantCreate, +) -> UserResponse<()> { + let user_from_db: domain::UserFromStorage = + user_from_token.get_user(state.clone()).await?.into(); + + let new_user = domain::NewUser::try_from((user_from_db, req, user_from_token))?; + let new_merchant = new_user.get_new_merchant(); + new_merchant + .create_new_merchant_and_insert_in_db(state.to_owned()) + .await?; + + let role_insertion_res = new_user + .insert_user_role_in_db( + state.clone(), + consts::user_role::ROLE_ID_ORGANIZATION_ADMIN.to_string(), + UserStatus::Active, + ) + .await; + if let Err(e) = role_insertion_res { + let _ = state + .store + .delete_merchant_account_by_merchant_id(new_merchant.get_merchant_id().as_str()) + .await; + return Err(e); + } + + Ok(ApplicationResponse::StatusOk) +} + +pub async fn list_merchant_ids_for_user( + state: AppState, + user: auth::UserFromToken, +) -> UserResponse> { + Ok(ApplicationResponse::Json( + utils::user::get_merchant_ids_for_user(state, &user.user_id).await?, + )) +} + +pub async fn get_users_for_merchant_account( + state: AppState, + user_from_token: auth::UserFromToken, +) -> UserResponse { + let users = state + .store + .find_users_and_roles_by_merchant_id(user_from_token.merchant_id.as_str()) + .await + .change_context(UserErrors::InternalServerError) + .attach_printable("No users for given merchant id")? + .into_iter() + .filter_map(|(user, role)| domain::UserAndRoleJoined(user, role).try_into().ok()) + .collect(); + + Ok(ApplicationResponse::Json(user_api::GetUsersResponse(users))) +} diff --git a/crates/router/src/core/user/dashboard_metadata.rs b/crates/router/src/core/user/dashboard_metadata.rs new file mode 100644 index 000000000000..de385fb8ed65 --- /dev/null +++ b/crates/router/src/core/user/dashboard_metadata.rs @@ -0,0 +1,537 @@ +use api_models::user::dashboard_metadata::{self as api, GetMultipleMetaDataPayload}; +use diesel_models::{ + enums::DashboardMetadata as DBEnum, user::dashboard_metadata::DashboardMetadata, +}; +use error_stack::ResultExt; + +use crate::{ + core::errors::{UserErrors, UserResponse, UserResult}, + routes::AppState, + services::{authentication::UserFromToken, ApplicationResponse}, + types::domain::{user::dashboard_metadata as types, MerchantKeyStore}, + utils::user::dashboard_metadata as utils, +}; + +pub async fn set_metadata( + state: AppState, + user: UserFromToken, + request: api::SetMetaDataRequest, +) -> UserResponse<()> { + let metadata_value = parse_set_request(request)?; + let metadata_key = DBEnum::from(&metadata_value); + + insert_metadata(&state, user, metadata_key, metadata_value).await?; + + Ok(ApplicationResponse::StatusOk) +} + +pub async fn get_multiple_metadata( + state: AppState, + user: UserFromToken, + request: GetMultipleMetaDataPayload, +) -> UserResponse> { + let metadata_keys: Vec = request.results.into_iter().map(parse_get_request).collect(); + + let metadata = fetch_metadata(&state, &user, metadata_keys.clone()).await?; + + let mut response = Vec::with_capacity(metadata_keys.len()); + for key in metadata_keys { + let data = metadata.iter().find(|ele| ele.data_key == key); + let resp; + if data.is_none() && 
utils::is_backfill_required(&key) { + let backfill_data = backfill_metadata(&state, &user, &key).await?; + resp = into_response(backfill_data.as_ref(), &key)?; + } else { + resp = into_response(data, &key)?; + } + response.push(resp); + } + + Ok(ApplicationResponse::Json(response)) +} + +fn parse_set_request(data_enum: api::SetMetaDataRequest) -> UserResult { + match data_enum { + api::SetMetaDataRequest::ProductionAgreement(req) => { + let ip_address = req + .ip_address + .ok_or(UserErrors::InternalServerError.into()) + .attach_printable("Error Getting Ip Address")?; + Ok(types::MetaData::ProductionAgreement( + types::ProductionAgreementValue { + version: req.version, + ip_address, + timestamp: common_utils::date_time::now(), + }, + )) + } + api::SetMetaDataRequest::SetupProcessor(req) => Ok(types::MetaData::SetupProcessor(req)), + api::SetMetaDataRequest::ConfigureEndpoint => Ok(types::MetaData::ConfigureEndpoint(true)), + api::SetMetaDataRequest::SetupComplete => Ok(types::MetaData::SetupComplete(true)), + api::SetMetaDataRequest::FirstProcessorConnected(req) => { + Ok(types::MetaData::FirstProcessorConnected(req)) + } + api::SetMetaDataRequest::SecondProcessorConnected(req) => { + Ok(types::MetaData::SecondProcessorConnected(req)) + } + api::SetMetaDataRequest::ConfiguredRouting(req) => { + Ok(types::MetaData::ConfiguredRouting(req)) + } + api::SetMetaDataRequest::TestPayment(req) => Ok(types::MetaData::TestPayment(req)), + api::SetMetaDataRequest::IntegrationMethod(req) => { + Ok(types::MetaData::IntegrationMethod(req)) + } + api::SetMetaDataRequest::IntegrationCompleted => { + Ok(types::MetaData::IntegrationCompleted(true)) + } + api::SetMetaDataRequest::SPRoutingConfigured(req) => { + Ok(types::MetaData::SPRoutingConfigured(req)) + } + api::SetMetaDataRequest::SPTestPayment => Ok(types::MetaData::SPTestPayment(true)), + api::SetMetaDataRequest::DownloadWoocom => Ok(types::MetaData::DownloadWoocom(true)), + api::SetMetaDataRequest::ConfigureWoocom => Ok(types::MetaData::ConfigureWoocom(true)), + api::SetMetaDataRequest::SetupWoocomWebhook => { + Ok(types::MetaData::SetupWoocomWebhook(true)) + } + api::SetMetaDataRequest::IsMultipleConfiguration => { + Ok(types::MetaData::IsMultipleConfiguration(true)) + } + } +} + +fn parse_get_request(data_enum: api::GetMetaDataRequest) -> DBEnum { + match data_enum { + api::GetMetaDataRequest::ProductionAgreement => DBEnum::ProductionAgreement, + api::GetMetaDataRequest::SetupProcessor => DBEnum::SetupProcessor, + api::GetMetaDataRequest::ConfigureEndpoint => DBEnum::ConfigureEndpoint, + api::GetMetaDataRequest::SetupComplete => DBEnum::SetupComplete, + api::GetMetaDataRequest::FirstProcessorConnected => DBEnum::FirstProcessorConnected, + api::GetMetaDataRequest::SecondProcessorConnected => DBEnum::SecondProcessorConnected, + api::GetMetaDataRequest::ConfiguredRouting => DBEnum::ConfiguredRouting, + api::GetMetaDataRequest::TestPayment => DBEnum::TestPayment, + api::GetMetaDataRequest::IntegrationMethod => DBEnum::IntegrationMethod, + api::GetMetaDataRequest::IntegrationCompleted => DBEnum::IntegrationCompleted, + api::GetMetaDataRequest::StripeConnected => DBEnum::StripeConnected, + api::GetMetaDataRequest::PaypalConnected => DBEnum::PaypalConnected, + api::GetMetaDataRequest::SPRoutingConfigured => DBEnum::SpRoutingConfigured, + api::GetMetaDataRequest::SPTestPayment => DBEnum::SpTestPayment, + api::GetMetaDataRequest::DownloadWoocom => DBEnum::DownloadWoocom, + api::GetMetaDataRequest::ConfigureWoocom => DBEnum::ConfigureWoocom, + 
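// Each API discriminant maps one-to-one onto its diesel `DashboardMetadata` key, and
// the match is total, so adding a variant to either enum surfaces here as a compile
// error rather than as a missing key at runtime.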
api::GetMetaDataRequest::SetupWoocomWebhook => DBEnum::SetupWoocomWebhook, + api::GetMetaDataRequest::IsMultipleConfiguration => DBEnum::IsMultipleConfiguration, + } +} + +fn into_response( + data: Option<&DashboardMetadata>, + data_type: &DBEnum, +) -> UserResult { + match data_type { + DBEnum::ProductionAgreement => Ok(api::GetMetaDataResponse::ProductionAgreement( + data.is_some(), + )), + DBEnum::SetupProcessor => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::SetupProcessor(resp)) + } + DBEnum::ConfigureEndpoint => { + Ok(api::GetMetaDataResponse::ConfigureEndpoint(data.is_some())) + } + DBEnum::SetupComplete => Ok(api::GetMetaDataResponse::SetupComplete(data.is_some())), + DBEnum::FirstProcessorConnected => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::FirstProcessorConnected(resp)) + } + DBEnum::SecondProcessorConnected => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::SecondProcessorConnected(resp)) + } + DBEnum::ConfiguredRouting => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::ConfiguredRouting(resp)) + } + DBEnum::TestPayment => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::TestPayment(resp)) + } + DBEnum::IntegrationMethod => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::IntegrationMethod(resp)) + } + DBEnum::IntegrationCompleted => Ok(api::GetMetaDataResponse::IntegrationCompleted( + data.is_some(), + )), + DBEnum::StripeConnected => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::StripeConnected(resp)) + } + DBEnum::PaypalConnected => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::PaypalConnected(resp)) + } + DBEnum::SpRoutingConfigured => { + let resp = utils::deserialize_to_response(data)?; + Ok(api::GetMetaDataResponse::SPRoutingConfigured(resp)) + } + DBEnum::SpTestPayment => Ok(api::GetMetaDataResponse::SPTestPayment(data.is_some())), + DBEnum::DownloadWoocom => Ok(api::GetMetaDataResponse::DownloadWoocom(data.is_some())), + DBEnum::ConfigureWoocom => Ok(api::GetMetaDataResponse::ConfigureWoocom(data.is_some())), + DBEnum::SetupWoocomWebhook => { + Ok(api::GetMetaDataResponse::SetupWoocomWebhook(data.is_some())) + } + + DBEnum::IsMultipleConfiguration => Ok(api::GetMetaDataResponse::IsMultipleConfiguration( + data.is_some(), + )), + } +} + +async fn insert_metadata( + state: &AppState, + user: UserFromToken, + metadata_key: DBEnum, + metadata_value: types::MetaData, +) -> UserResult { + match metadata_value { + types::MetaData::ProductionAgreement(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::SetupProcessor(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::ConfigureEndpoint(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::SetupComplete(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::FirstProcessorConnected(data) => { + 
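// As in every arm of this match, the payload is written through the same
// merchant-scoped insert helper; only the serialized `data` type differs
// per metadata variant.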
utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::SecondProcessorConnected(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::ConfiguredRouting(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::TestPayment(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::IntegrationMethod(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::IntegrationCompleted(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::StripeConnected(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::PaypalConnected(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::SPRoutingConfigured(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::SPTestPayment(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::DownloadWoocom(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::ConfigureWoocom(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::SetupWoocomWebhook(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + types::MetaData::IsMultipleConfiguration(data) => { + utils::insert_merchant_scoped_metadata_to_db( + state, + user.user_id, + user.merchant_id, + user.org_id, + metadata_key, + data, + ) + .await + } + } +} + +async fn fetch_metadata( + state: &AppState, + user: &UserFromToken, + metadata_keys: Vec, +) -> UserResult> { + let mut dashboard_metadata = Vec::with_capacity(metadata_keys.len()); + let (merchant_scoped_enums, _) = utils::separate_metadata_type_based_on_scope(metadata_keys); + + if !merchant_scoped_enums.is_empty() { + let mut res = utils::get_merchant_scoped_metadata_from_db( + state, + user.merchant_id.to_owned(), + user.org_id.to_owned(), + merchant_scoped_enums, + ) + .await?; + dashboard_metadata.append(&mut res); + } + + Ok(dashboard_metadata) +} + +pub async fn backfill_metadata( + state: &AppState, + user: &UserFromToken, + key: &DBEnum, +) -> UserResult> { + let key_store = state + .store + .get_merchant_key_store_by_merchant_id( + &user.merchant_id, + &state.store.get_master_key().to_vec().into(), + ) + .await + 
.change_context(UserErrors::InternalServerError)?; + + match key { + DBEnum::StripeConnected => { + let mca = if let Some(stripe_connected) = get_merchant_connector_account_by_name( + state, + &user.merchant_id, + api_models::enums::RoutableConnectors::Stripe + .to_string() + .as_str(), + &key_store, + ) + .await? + { + stripe_connected + } else if let Some(stripe_test_connected) = get_merchant_connector_account_by_name( + state, + &user.merchant_id, + //TODO: Use Enum with proper feature flag + "stripe_test", + &key_store, + ) + .await? + { + stripe_test_connected + } else { + return Ok(None); + }; + + Some( + insert_metadata( + state, + user.to_owned(), + DBEnum::StripeConnected, + types::MetaData::StripeConnected(api::ProcessorConnected { + processor_id: mca.merchant_connector_id, + processor_name: mca.connector_name, + }), + ) + .await, + ) + .transpose() + } + DBEnum::PaypalConnected => { + let mca = if let Some(paypal_connected) = get_merchant_connector_account_by_name( + state, + &user.merchant_id, + api_models::enums::RoutableConnectors::Paypal + .to_string() + .as_str(), + &key_store, + ) + .await? + { + paypal_connected + } else if let Some(paypal_test_connected) = get_merchant_connector_account_by_name( + state, + &user.merchant_id, + //TODO: Use Enum with proper feature flag + "paypal_test", + &key_store, + ) + .await? + { + paypal_test_connected + } else { + return Ok(None); + }; + + Some( + insert_metadata( + state, + user.to_owned(), + DBEnum::PaypalConnected, + types::MetaData::PaypalConnected(api::ProcessorConnected { + processor_id: mca.merchant_connector_id, + processor_name: mca.connector_name, + }), + ) + .await, + ) + .transpose() + } + _ => Ok(None), + } +} + +pub async fn get_merchant_connector_account_by_name( + state: &AppState, + merchant_id: &str, + connector_name: &str, + key_store: &MerchantKeyStore, +) -> UserResult> { + state + .store + .find_merchant_connector_account_by_merchant_id_connector_name( + merchant_id, + connector_name, + key_store, + ) + .await + .map_err(|e| { + e.change_context(UserErrors::InternalServerError) + .attach_printable("DB Error Fetching DashboardMetaData") + }) + .map(|data| data.first().cloned()) +} diff --git a/crates/router/src/core/user/sample_data.rs b/crates/router/src/core/user/sample_data.rs new file mode 100644 index 000000000000..19b7d3bd815c --- /dev/null +++ b/crates/router/src/core/user/sample_data.rs @@ -0,0 +1,82 @@ +use api_models::user::sample_data::SampleDataRequest; +use common_utils::errors::ReportSwitchExt; +use data_models::payments::payment_intent::PaymentIntentNew; +use diesel_models::{user::sample_data::PaymentAttemptBatchNew, RefundNew}; + +pub type SampleDataApiResponse = SampleDataResult>; + +use crate::{ + core::errors::sample_data::SampleDataResult, + routes::AppState, + services::{authentication::UserFromToken, ApplicationResponse}, + utils::user::sample_data::generate_sample_data, +}; + +pub async fn generate_sample_data_for_user( + state: AppState, + user_from_token: UserFromToken, + req: SampleDataRequest, +) -> SampleDataApiResponse<()> { + let sample_data = + generate_sample_data(&state, req, user_from_token.merchant_id.as_str()).await?; + + let (payment_intents, payment_attempts, refunds): ( + Vec, + Vec, + Vec, + ) = sample_data.into_iter().fold( + (Vec::new(), Vec::new(), Vec::new()), + |(mut pi, mut pa, mut rf), (payment_intent, payment_attempt, refund)| { + pi.push(payment_intent); + pa.push(payment_attempt); + if let Some(refund) = refund { + rf.push(refund); + } + (pi, pa, rf) + }, + ); + 
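// A single fold splits the generated tuples into three batch vectors in one pass,
// pushing a refund only when one was generated for that payment; the three bulk
// inserts below each receive a ready-made batch.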
+ state + .store + .insert_payment_intents_batch_for_sample_data(payment_intents) + .await + .switch()?; + state + .store + .insert_payment_attempts_batch_for_sample_data(payment_attempts) + .await + .switch()?; + state + .store + .insert_refunds_batch_for_sample_data(refunds) + .await + .switch()?; + + Ok(ApplicationResponse::StatusOk) +} + +pub async fn delete_sample_data_for_user( + state: AppState, + user_from_token: UserFromToken, + _req: SampleDataRequest, +) -> SampleDataApiResponse<()> { + let merchant_id_del = user_from_token.merchant_id.as_str(); + + state + .store + .delete_payment_intents_for_sample_data(merchant_id_del) + .await + .switch()?; + state + .store + .delete_payment_attempts_for_sample_data(merchant_id_del) + .await + .switch()?; + state + .store + .delete_refunds_for_sample_data(merchant_id_del) + .await + .switch()?; + + Ok(ApplicationResponse::StatusOk) +} diff --git a/crates/router/src/core/user_role.rs b/crates/router/src/core/user_role.rs new file mode 100644 index 000000000000..2b7752d1904b --- /dev/null +++ b/crates/router/src/core/user_role.rs @@ -0,0 +1,101 @@ +use api_models::user_role as user_role_api; +use diesel_models::user_role::UserRoleUpdate; +use error_stack::ResultExt; + +use crate::{ + core::errors::{UserErrors, UserResponse}, + routes::AppState, + services::{ + authentication::{self as auth}, + authorization::{info, predefined_permissions}, + ApplicationResponse, + }, + utils, +}; + +pub async fn get_authorization_info( + _state: AppState, +) -> UserResponse { + Ok(ApplicationResponse::Json( + user_role_api::AuthorizationInfoResponse( + info::get_authorization_info() + .into_iter() + .filter_map(|module| module.try_into().ok()) + .collect(), + ), + )) +} + +pub async fn list_roles(_state: AppState) -> UserResponse { + Ok(ApplicationResponse::Json(user_role_api::ListRolesResponse( + predefined_permissions::PREDEFINED_PERMISSIONS + .iter() + .filter_map(|(role_id, role_info)| { + utils::user_role::get_role_name_and_permission_response(role_info).map( + |(permissions, role_name)| user_role_api::RoleInfoResponse { + permissions, + role_id, + role_name, + }, + ) + }) + .collect(), + ))) +} + +pub async fn get_role( + _state: AppState, + role: user_role_api::GetRoleRequest, +) -> UserResponse { + let info = predefined_permissions::PREDEFINED_PERMISSIONS + .get_key_value(role.role_id.as_str()) + .and_then(|(role_id, role_info)| { + utils::user_role::get_role_name_and_permission_response(role_info).map( + |(permissions, role_name)| user_role_api::RoleInfoResponse { + permissions, + role_id, + role_name, + }, + ) + }) + .ok_or(UserErrors::InvalidRoleId)?; + + Ok(ApplicationResponse::Json(info)) +} + +pub async fn update_user_role( + state: AppState, + user_from_token: auth::UserFromToken, + req: user_role_api::UpdateUserRoleRequest, +) -> UserResponse<()> { + let merchant_id = user_from_token.merchant_id; + let role_id = req.role_id.clone(); + utils::user_role::validate_role_id(role_id.as_str())?; + + if user_from_token.user_id == req.user_id { + return Err(UserErrors::InvalidRoleOperation.into()) + .attach_printable("Admin User Changing their role"); + } + + state + .store + .update_user_role_by_user_id_merchant_id( + req.user_id.as_str(), + merchant_id.as_str(), + UserRoleUpdate::UpdateRole { + role_id, + modified_by: user_from_token.user_id, + }, + ) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + return e + .change_context(UserErrors::InvalidRoleOperation) + .attach_printable("UserId MerchantId not found"); + } + 
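
The `update_user_role` handler above enforces two rules worth calling out: a user may not change their own role, and a `not found` result from the role update is surfaced as an invalid operation (the target user does not belong to this merchant) rather than an internal error. A hedged sketch of that control flow; `UserErrors` and `DbError` here are simplified placeholders, not the real hyperswitch types:

```rust
#[derive(Debug, PartialEq)]
enum UserErrors {
    InvalidRoleOperation,
    InternalServerError,
}

struct DbError {
    not_found: bool,
}

fn update_role(
    current_user: &str,
    target_user: &str,
    db_result: Result<(), DbError>,
) -> Result<(), UserErrors> {
    // Rule 1: admins may not change their own role.
    if current_user == target_user {
        return Err(UserErrors::InvalidRoleOperation);
    }
    // Rule 2: a missing (user_id, merchant_id) row means the caller targeted
    // a user outside this merchant -- an invalid operation, not a server fault.
    db_result.map_err(|e| {
        if e.not_found {
            UserErrors::InvalidRoleOperation
        } else {
            UserErrors::InternalServerError
        }
    })
}
```
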
e.change_context(UserErrors::InternalServerError) })?; + + Ok(ApplicationResponse::StatusOk) +} diff --git a/crates/router/src/core/utils.rs b/crates/router/src/core/utils.rs index 5207e4ba8079..670c25c814ed 100644 --- a/crates/router/src/core/utils.rs +++ b/crates/router/src/core/utils.rs @@ -4,6 +4,7 @@ use api_models::{ enums::{DisputeStage, DisputeStatus}, payment_methods::{SurchargeDetailsResponse, SurchargeMetadata}, }; +use common_enums::RequestIncrementalAuthorization; #[cfg(feature = "payouts")] use common_utils::{crypto::Encryptable, pii::Email}; use common_utils::{ @@ -1133,3 +1134,32 @@ pub async fn get_individual_surcharge_detail_from_redis( .get_hash_field_and_deserialize(&redis_key, &value_key, "SurchargeDetailsResponse") .await } + +pub fn get_request_incremental_authorization_value( + request_incremental_authorization: Option<bool>, + capture_method: Option<common_enums::CaptureMethod>, +) -> RouterResult<RequestIncrementalAuthorization> { + request_incremental_authorization + .map(|request_incremental_authorization| { + if request_incremental_authorization { + if capture_method == Some(common_enums::CaptureMethod::Automatic) { + Err(errors::ApiErrorResponse::NotSupported { message: "incremental authorization is not supported when capture_method is automatic".to_owned() }).into_report()? + } + Ok(RequestIncrementalAuthorization::True) + } else { + Ok(RequestIncrementalAuthorization::False) + } + }) + .unwrap_or(Ok(RequestIncrementalAuthorization::default())) +} + +pub fn get_incremental_authorization_allowed_value( + incremental_authorization_allowed: Option<bool>, + request_incremental_authorization: RequestIncrementalAuthorization, +) -> Option<bool> { + if request_incremental_authorization == common_enums::RequestIncrementalAuthorization::False { + Some(false) + } else { + incremental_authorization_allowed + } +} diff --git a/crates/router/src/core/verify_connector.rs b/crates/router/src/core/verify_connector.rs new file mode 100644 index 000000000000..e837e8b8b259 --- /dev/null +++ b/crates/router/src/core/verify_connector.rs @@ -0,0 +1,63 @@ +use api_models::{enums::Connector, verify_connector::VerifyConnectorRequest}; +use error_stack::{IntoReport, ResultExt}; + +use crate::{ + connector, + core::errors, + services, + types::{ + api, + api::verify_connector::{self as types, VerifyConnector}, + }, + utils::verify_connector as utils, + AppState, +}; + +pub async fn verify_connector_credentials( + state: AppState, + req: VerifyConnectorRequest, +) -> errors::RouterResponse<()> { + let boxed_connector = api::ConnectorData::get_connector_by_name( + &state.conf.connectors, + &req.connector_name.to_string(), + api::GetToken::Connector, + None, + ) + .change_context(errors::ApiErrorResponse::IncorrectConnectorNameGiven)?; + + let card_details = utils::get_test_card_details(req.connector_name)?
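
Spelled out, `get_request_incremental_authorization_value` rejects the one invalid combination, `request_incremental_authorization: true` together with automatic capture, and otherwise maps the optional flag onto the enum, falling back to the enum's default when the field is absent. A standalone restatement of that decision table (the enum is reduced to two variants here for illustration; the real type lives in `common_enums`):

```rust
#[derive(Debug, Default, PartialEq)]
enum RequestIncrementalAuthorization {
    True,
    #[default]
    False,
}

#[derive(PartialEq)]
enum CaptureMethod {
    Automatic,
    Manual,
}

fn request_incremental_authorization_value(
    requested: Option<bool>,
    capture_method: Option<CaptureMethod>,
) -> Result<RequestIncrementalAuthorization, &'static str> {
    match requested {
        // Incremental authorization needs a later, manual capture step.
        Some(true) if capture_method == Some(CaptureMethod::Automatic) => {
            Err("incremental authorization is not supported when capture_method is automatic")
        }
        Some(true) => Ok(RequestIncrementalAuthorization::True),
        Some(false) => Ok(RequestIncrementalAuthorization::False),
        None => Ok(RequestIncrementalAuthorization::default()),
    }
}
```
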
+ .ok_or(errors::ApiErrorResponse::FlowNotSupported { + flow: "Verify credentials".to_string(), + connector: req.connector_name.to_string(), + }) + .into_report()?; + + match req.connector_name { + Connector::Stripe => { + connector::Stripe::verify( + &state, + types::VerifyConnectorData { + connector: *boxed_connector.connector, + connector_auth: req.connector_account_details.into(), + card_details, + }, + ) + .await + } + Connector::Paypal => connector::Paypal::get_access_token( + &state, + types::VerifyConnectorData { + connector: *boxed_connector.connector, + connector_auth: req.connector_account_details.into(), + card_details, + }, + ) + .await + .map(|_| services::ApplicationResponse::StatusOk), + _ => Err(errors::ApiErrorResponse::FlowNotSupported { + flow: "Verify credentials".to_string(), + connector: req.connector_name.to_string(), + }) + .into_report(), + } +} diff --git a/crates/router/src/core/webhooks.rs b/crates/router/src/core/webhooks.rs index 67154ae33aef..be8d118a47c2 100644 --- a/crates/router/src/core/webhooks.rs +++ b/crates/router/src/core/webhooks.rs @@ -905,6 +905,7 @@ pub async fn webhooks_wrapper { diff --git a/crates/router/src/db.rs b/crates/router/src/db.rs index 9687f7f97c92..6558cc6ace50 100644 --- a/crates/router/src/db.rs +++ b/crates/router/src/db.rs @@ -6,12 +6,14 @@ pub mod capture; pub mod cards_info; pub mod configs; pub mod customers; +pub mod dashboard_metadata; pub mod dispute; pub mod ephemeral_key; pub mod events; pub mod file; pub mod fraud_check; pub mod gsm; +mod kafka_store; pub mod locker_mock_up; pub mod mandate; pub mod merchant_account; @@ -31,11 +33,24 @@ pub mod user_role; use data_models::payments::{ payment_attempt::PaymentAttemptInterface, payment_intent::PaymentIntentInterface, }; +use diesel_models::{ + fraud_check::{FraudCheck, FraudCheckNew, FraudCheckUpdate}, + organization::{Organization, OrganizationNew, OrganizationUpdate}, +}; +use error_stack::ResultExt; use masking::PeekInterface; use redis_interface::errors::RedisError; -use storage_impl::{redis::kv_store::RedisConnInterface, MockDb}; - -use crate::{errors::CustomResult, services::Store}; +use storage_impl::{errors::StorageError, redis::kv_store::RedisConnInterface, MockDb}; + +pub use self::kafka_store::KafkaStore; +use self::{fraud_check::FraudCheckInterface, organization::OrganizationInterface}; +pub use crate::{ + errors::CustomResult, + services::{ + kafka::{KafkaError, KafkaProducer, MQResult}, + Store, + }, +}; #[derive(PartialEq, Eq)] pub enum StorageImpl { @@ -54,11 +69,12 @@ pub trait StorageInterface: + configs::ConfigInterface + capture::CaptureInterface + customers::CustomerInterface + + dashboard_metadata::DashboardMetadataInterface + dispute::DisputeInterface + ephemeral_key::EphemeralKeyInterface + events::EventInterface + file::FileMetadataInterface - + fraud_check::FraudCheckInterface + + FraudCheckInterface + locker_mock_up::LockerMockUpInterface + mandate::MandateInterface + merchant_account::MerchantAccountInterface @@ -79,11 +95,12 @@ pub trait StorageInterface: + RedisConnInterface + RequestIdStore + business_profile::BusinessProfileInterface - + organization::OrganizationInterface + + OrganizationInterface + routing_algorithm::RoutingAlgorithmInterface + gsm::GsmInterface + user::UserInterface + user_role::UserRoleInterface + + user::sample_data::BatchSampleDataInterface + 'static { fn get_scheduler_db(&self) -> Box; @@ -151,7 +168,6 @@ where T: serde::de::DeserializeOwned, { use common_utils::ext_traits::ByteSliceExt; - use error_stack::ResultExt; 
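
The connector match above is the heart of credential verification: Stripe runs a real authorization against a known test card, PayPal only needs a successful OAuth token fetch (the token itself is discarded), and every other connector is rejected as unsupported. The dispatch shape, with the connector calls stubbed out as hypothetical functions:

```rust
#[derive(Debug, Clone, Copy)]
enum Connector {
    Stripe,
    Paypal,
    Other,
}

#[derive(Debug)]
enum VerifyError {
    FlowNotSupported(&'static str),
}

// Stubs standing in for the real connector integrations.
fn stripe_verify_with_test_card() -> Result<(), VerifyError> {
    Ok(())
}
fn paypal_fetch_access_token() -> Result<String, VerifyError> {
    Ok("access-token".into())
}

fn verify_connector_credentials(connector: Connector) -> Result<(), VerifyError> {
    match connector {
        // Stripe: a minimal authorization with a test card proves the keys work.
        Connector::Stripe => stripe_verify_with_test_card(),
        // PayPal: being issued an access token proves the client id/secret pair.
        Connector::Paypal => paypal_fetch_access_token().map(|_| ()),
        // Everything else does not support this flow yet.
        _ => Err(VerifyError::FlowNotSupported("Verify credentials")),
    }
}
```
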
let bytes = db.get_key(key).await?; bytes @@ -160,3 +176,72 @@ where } dyn_clone::clone_trait_object!(StorageInterface); + +impl RequestIdStore for KafkaStore { + fn add_request_id(&mut self, request_id: String) { + self.diesel_store.add_request_id(request_id) + } +} + +#[async_trait::async_trait] +impl FraudCheckInterface for KafkaStore { + async fn insert_fraud_check_response( + &self, + new: FraudCheckNew, + ) -> CustomResult { + self.diesel_store.insert_fraud_check_response(new).await + } + async fn update_fraud_check_response_with_attempt_id( + &self, + fraud_check: FraudCheck, + fraud_check_update: FraudCheckUpdate, + ) -> CustomResult { + self.diesel_store + .update_fraud_check_response_with_attempt_id(fraud_check, fraud_check_update) + .await + } + async fn find_fraud_check_by_payment_id( + &self, + payment_id: String, + merchant_id: String, + ) -> CustomResult { + self.diesel_store + .find_fraud_check_by_payment_id(payment_id, merchant_id) + .await + } + async fn find_fraud_check_by_payment_id_if_present( + &self, + payment_id: String, + merchant_id: String, + ) -> CustomResult, StorageError> { + self.diesel_store + .find_fraud_check_by_payment_id_if_present(payment_id, merchant_id) + .await + } +} + +#[async_trait::async_trait] +impl OrganizationInterface for KafkaStore { + async fn insert_organization( + &self, + organization: OrganizationNew, + ) -> CustomResult { + self.diesel_store.insert_organization(organization).await + } + async fn find_organization_by_org_id( + &self, + org_id: &str, + ) -> CustomResult { + self.diesel_store.find_organization_by_org_id(org_id).await + } + + async fn update_organization_by_org_id( + &self, + org_id: &str, + update: OrganizationUpdate, + ) -> CustomResult { + self.diesel_store + .update_organization_by_org_id(org_id, update) + .await + } +} diff --git a/crates/router/src/db/dashboard_metadata.rs b/crates/router/src/db/dashboard_metadata.rs new file mode 100644 index 000000000000..ec24b4ed07da --- /dev/null +++ b/crates/router/src/db/dashboard_metadata.rs @@ -0,0 +1,249 @@ +use diesel_models::{enums, user::dashboard_metadata as storage}; +use error_stack::{IntoReport, ResultExt}; +use storage_impl::MockDb; + +use crate::{ + connection, + core::errors::{self, CustomResult}, + services::Store, +}; + +#[async_trait::async_trait] +pub trait DashboardMetadataInterface { + async fn insert_metadata( + &self, + metadata: storage::DashboardMetadataNew, + ) -> CustomResult; + async fn update_metadata( + &self, + user_id: Option, + merchant_id: String, + org_id: String, + data_key: enums::DashboardMetadata, + dashboard_metadata_update: storage::DashboardMetadataUpdate, + ) -> CustomResult; + + async fn find_user_scoped_dashboard_metadata( + &self, + user_id: &str, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError>; + async fn find_merchant_scoped_dashboard_metadata( + &self, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError>; +} + +#[async_trait::async_trait] +impl DashboardMetadataInterface for Store { + async fn insert_metadata( + &self, + metadata: storage::DashboardMetadataNew, + ) -> CustomResult { + let conn = connection::pg_connection_write(self).await?; + metadata + .insert(&conn) + .await + .map_err(Into::into) + .into_report() + } + + async fn update_metadata( + &self, + user_id: Option, + merchant_id: String, + org_id: String, + data_key: enums::DashboardMetadata, + dashboard_metadata_update: storage::DashboardMetadataUpdate, + ) -> CustomResult { 
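
`KafkaStore` is a decorator over the diesel-backed `Store`: it implements every storage trait by forwarding to `diesel_store`, and only the write paths that feed analytics (payment attempts, payment intents, refunds, further below) add Kafka publishing on top. The pattern in miniature, assuming the `async-trait` crate and stand-in types:

```rust
#[async_trait::async_trait]
trait OrganizationInterface {
    async fn insert_organization(&self, org: String) -> Result<String, ()>;
}

struct DieselStore;

#[async_trait::async_trait]
impl OrganizationInterface for DieselStore {
    async fn insert_organization(&self, org: String) -> Result<String, ()> {
        Ok(org) // pretend this row went to Postgres
    }
}

/// Decorator: owns the real store and forwards calls unchanged.
struct KafkaStore {
    diesel_store: DieselStore,
}

#[async_trait::async_trait]
impl OrganizationInterface for KafkaStore {
    async fn insert_organization(&self, org: String) -> Result<String, ()> {
        // Pure delegation; analytics-relevant methods would also publish
        // an event to Kafka before returning.
        self.diesel_store.insert_organization(org).await
    }
}
```

The benefit of the decorator over feature flags inside `Store` is isolation: any consumer of the storage traits can be handed either store without knowing Kafka exists.
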
+ let conn = connection::pg_connection_write(self).await?; + storage::DashboardMetadata::update( + &conn, + user_id, + merchant_id, + org_id, + data_key, + dashboard_metadata_update, + ) + .await + .map_err(Into::into) + .into_report() + } + + async fn find_user_scoped_dashboard_metadata( + &self, + user_id: &str, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError> { + let conn = connection::pg_connection_write(self).await?; + storage::DashboardMetadata::find_user_scoped_dashboard_metadata( + &conn, + user_id.to_owned(), + merchant_id.to_owned(), + org_id.to_owned(), + data_keys, + ) + .await + .map_err(Into::into) + .into_report() + } + + async fn find_merchant_scoped_dashboard_metadata( + &self, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError> { + let conn = connection::pg_connection_write(self).await?; + storage::DashboardMetadata::find_merchant_scoped_dashboard_metadata( + &conn, + merchant_id.to_owned(), + org_id.to_owned(), + data_keys, + ) + .await + .map_err(Into::into) + .into_report() + } +} + +#[async_trait::async_trait] +impl DashboardMetadataInterface for MockDb { + async fn insert_metadata( + &self, + metadata: storage::DashboardMetadataNew, + ) -> CustomResult { + let mut dashboard_metadata = self.dashboard_metadata.lock().await; + if dashboard_metadata.iter().any(|metadata_inner| { + metadata_inner.user_id == metadata.user_id + && metadata_inner.merchant_id == metadata.merchant_id + && metadata_inner.org_id == metadata.org_id + && metadata_inner.data_key == metadata.data_key + }) { + Err(errors::StorageError::DuplicateValue { + entity: "user_id, merchant_id, org_id and data_key", + key: None, + })? + } + let metadata_new = storage::DashboardMetadata { + id: dashboard_metadata + .len() + .try_into() + .into_report() + .change_context(errors::StorageError::MockDbError)?, + user_id: metadata.user_id, + merchant_id: metadata.merchant_id, + org_id: metadata.org_id, + data_key: metadata.data_key, + data_value: metadata.data_value, + created_by: metadata.created_by, + created_at: metadata.created_at, + last_modified_by: metadata.last_modified_by, + last_modified_at: metadata.last_modified_at, + }; + dashboard_metadata.push(metadata_new.clone()); + Ok(metadata_new) + } + + async fn update_metadata( + &self, + user_id: Option, + merchant_id: String, + org_id: String, + data_key: enums::DashboardMetadata, + dashboard_metadata_update: storage::DashboardMetadataUpdate, + ) -> CustomResult { + let mut dashboard_metadata = self.dashboard_metadata.lock().await; + + let dashboard_metadata_to_update = dashboard_metadata + .iter_mut() + .find(|metadata| { + metadata.user_id == user_id + && metadata.merchant_id == merchant_id + && metadata.org_id == org_id + && metadata.data_key == data_key + }) + .ok_or(errors::StorageError::MockDbError)?; + + match dashboard_metadata_update { + storage::DashboardMetadataUpdate::UpdateData { + data_key, + data_value, + last_modified_by, + } => { + dashboard_metadata_to_update.data_key = data_key; + dashboard_metadata_to_update.data_value = data_value; + dashboard_metadata_to_update.last_modified_by = last_modified_by; + dashboard_metadata_to_update.last_modified_at = common_utils::date_time::now(); + } + } + Ok(dashboard_metadata_to_update.clone()) + } + + async fn find_user_scoped_dashboard_metadata( + &self, + user_id: &str, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError> { + let dashboard_metadata = 
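
The `MockDb` insert above mirrors the database's uniqueness constraint in memory: an insert is rejected when a row with the same `(user_id, merchant_id, org_id, data_key)` tuple already exists. A condensed sketch with placeholder types:

```rust
use std::sync::Mutex;

#[derive(PartialEq)]
struct MetadataKey {
    user_id: Option<String>,
    merchant_id: String,
    org_id: String,
    data_key: String,
}

struct MockDb {
    dashboard_metadata: Mutex<Vec<MetadataKey>>,
}

impl MockDb {
    fn insert_metadata(&self, row: MetadataKey) -> Result<(), &'static str> {
        let mut rows = self.dashboard_metadata.lock().unwrap();
        // Emulate the unique index on (user_id, merchant_id, org_id, data_key).
        if rows.iter().any(|existing| *existing == row) {
            return Err("duplicate: user_id, merchant_id, org_id and data_key");
        }
        rows.push(row);
        Ok(())
    }
}
```
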
self.dashboard_metadata.lock().await; + let query_result = dashboard_metadata + .iter() + .filter(|metadata_inner| { + metadata_inner + .user_id + .clone() + .map(|user_id_inner| user_id_inner == user_id) + .unwrap_or(false) + && metadata_inner.merchant_id == merchant_id + && metadata_inner.org_id == org_id + && data_keys.contains(&metadata_inner.data_key) + }) + .cloned() + .collect::>(); + + if query_result.is_empty() { + return Err(errors::StorageError::ValueNotFound(format!( + "No dashboard_metadata available for user_id = {user_id},\ + merchant_id = {merchant_id}, org_id = {org_id} and data_keys = {data_keys:?}", + )) + .into()); + } + Ok(query_result) + } + + async fn find_merchant_scoped_dashboard_metadata( + &self, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError> { + let dashboard_metadata = self.dashboard_metadata.lock().await; + let query_result = dashboard_metadata + .iter() + .filter(|metadata_inner| { + metadata_inner.user_id.is_none() + && metadata_inner.merchant_id == merchant_id + && metadata_inner.org_id == org_id + && data_keys.contains(&metadata_inner.data_key) + }) + .cloned() + .collect::>(); + + if query_result.is_empty() { + return Err(errors::StorageError::ValueNotFound(format!( + "No dashboard_metadata available for merchant_id = {merchant_id},\ + org_id = {org_id} and data_keyss = {data_keys:?}", + )) + .into()); + } + Ok(query_result) + } +} diff --git a/crates/router/src/db/kafka_store.rs b/crates/router/src/db/kafka_store.rs new file mode 100644 index 000000000000..32548e36b6fb --- /dev/null +++ b/crates/router/src/db/kafka_store.rs @@ -0,0 +1,2097 @@ +use std::sync::Arc; + +use common_enums::enums::MerchantStorageScheme; +use common_utils::errors::CustomResult; +use data_models::payments::{ + payment_attempt::PaymentAttemptInterface, payment_intent::PaymentIntentInterface, +}; +use diesel_models::{ + enums, + enums::ProcessTrackerStatus, + ephemeral_key::{EphemeralKey, EphemeralKeyNew}, + reverse_lookup::{ReverseLookup, ReverseLookupNew}, + user_role as user_storage, +}; +use masking::Secret; +use redis_interface::{errors::RedisError, RedisConnectionPool, RedisEntryId}; +use router_env::logger; +use scheduler::{ + db::{process_tracker::ProcessTrackerInterface, queue::QueueInterface}, + SchedulerInterface, +}; +use storage_impl::redis::kv_store::RedisConnInterface; +use time::PrimitiveDateTime; + +use super::{ + dashboard_metadata::DashboardMetadataInterface, + user::{sample_data::BatchSampleDataInterface, UserInterface}, + user_role::UserRoleInterface, +}; +use crate::{ + core::errors::{self, ProcessTrackerError}, + db::{ + address::AddressInterface, + api_keys::ApiKeyInterface, + business_profile::BusinessProfileInterface, + capture::CaptureInterface, + cards_info::CardsInfoInterface, + configs::ConfigInterface, + customers::CustomerInterface, + dispute::DisputeInterface, + ephemeral_key::EphemeralKeyInterface, + events::EventInterface, + file::FileMetadataInterface, + gsm::GsmInterface, + locker_mock_up::LockerMockUpInterface, + mandate::MandateInterface, + merchant_account::MerchantAccountInterface, + merchant_connector_account::{ConnectorAccessToken, MerchantConnectorAccountInterface}, + merchant_key_store::MerchantKeyStoreInterface, + payment_link::PaymentLinkInterface, + payment_method::PaymentMethodInterface, + payout_attempt::PayoutAttemptInterface, + payouts::PayoutsInterface, + refund::RefundInterface, + reverse_lookup::ReverseLookupInterface, + routing_algorithm::RoutingAlgorithmInterface, + 
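
Scoping of dashboard metadata is encoded entirely in `user_id: Option<String>`: user-scoped rows carry `Some(user_id)` and must match the caller, while merchant-scoped rows carry `None`. The two lookups above differ only in that predicate, roughly:

```rust
struct MetadataRow {
    user_id: Option<String>,
    merchant_id: String,
    org_id: String,
}

fn is_user_scoped(row: &MetadataRow, user_id: &str, merchant_id: &str, org_id: &str) -> bool {
    // User-scoped: user_id must be present and equal to the caller's id.
    row.user_id.as_deref() == Some(user_id)
        && row.merchant_id == merchant_id
        && row.org_id == org_id
}

fn is_merchant_scoped(row: &MetadataRow, merchant_id: &str, org_id: &str) -> bool {
    // Merchant-scoped: explicitly no user attached.
    row.user_id.is_none() && row.merchant_id == merchant_id && row.org_id == org_id
}
```
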
MasterKeyInterface, StorageInterface, + }, + services::{authentication, kafka::KafkaProducer, Store}, + types::{ + domain, + storage::{self, business_profile}, + AccessToken, + }, +}; + +#[derive(Clone)] +pub struct KafkaStore { + kafka_producer: KafkaProducer, + pub diesel_store: Store, +} + +impl KafkaStore { + pub async fn new(store: Store, kafka_producer: KafkaProducer) -> Self { + Self { + kafka_producer, + diesel_store: store, + } + } +} + +#[async_trait::async_trait] +impl AddressInterface for KafkaStore { + async fn find_address_by_address_id( + &self, + address_id: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .find_address_by_address_id(address_id, key_store) + .await + } + + async fn update_address( + &self, + address_id: String, + address: storage::AddressUpdate, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .update_address(address_id, address, key_store) + .await + } + + async fn update_address_for_payments( + &self, + this: domain::Address, + address: domain::AddressUpdate, + payment_id: String, + key_store: &domain::MerchantKeyStore, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .update_address_for_payments(this, address, payment_id, key_store, storage_scheme) + .await + } + + async fn insert_address_for_payments( + &self, + payment_id: &str, + address: domain::Address, + key_store: &domain::MerchantKeyStore, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .insert_address_for_payments(payment_id, address, key_store, storage_scheme) + .await + } + + async fn find_address_by_merchant_id_payment_id_address_id( + &self, + merchant_id: &str, + payment_id: &str, + address_id: &str, + key_store: &domain::MerchantKeyStore, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_address_by_merchant_id_payment_id_address_id( + merchant_id, + payment_id, + address_id, + key_store, + storage_scheme, + ) + .await + } + + async fn insert_address_for_customers( + &self, + address: domain::Address, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .insert_address_for_customers(address, key_store) + .await + } + + async fn update_address_by_merchant_id_customer_id( + &self, + customer_id: &str, + merchant_id: &str, + address: storage::AddressUpdate, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .update_address_by_merchant_id_customer_id(customer_id, merchant_id, address, key_store) + .await + } +} + +#[async_trait::async_trait] +impl ApiKeyInterface for KafkaStore { + async fn insert_api_key( + &self, + api_key: storage::ApiKeyNew, + ) -> CustomResult { + self.diesel_store.insert_api_key(api_key).await + } + + async fn update_api_key( + &self, + merchant_id: String, + key_id: String, + api_key: storage::ApiKeyUpdate, + ) -> CustomResult { + self.diesel_store + .update_api_key(merchant_id, key_id, api_key) + .await + } + + async fn revoke_api_key( + &self, + merchant_id: &str, + key_id: &str, + ) -> CustomResult { + self.diesel_store.revoke_api_key(merchant_id, key_id).await + } + + async fn find_api_key_by_merchant_id_key_id_optional( + &self, + merchant_id: &str, + key_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_api_key_by_merchant_id_key_id_optional(merchant_id, key_id) + .await + } + + async fn find_api_key_by_hash_optional( + &self, + hashed_api_key: 
storage::HashedApiKey, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_api_key_by_hash_optional(hashed_api_key) + .await + } + + async fn list_api_keys_by_merchant_id( + &self, + merchant_id: &str, + limit: Option, + offset: Option, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_api_keys_by_merchant_id(merchant_id, limit, offset) + .await + } +} + +#[async_trait::async_trait] +impl CardsInfoInterface for KafkaStore { + async fn get_card_info( + &self, + card_iin: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store.get_card_info(card_iin).await + } +} + +#[async_trait::async_trait] +impl ConfigInterface for KafkaStore { + async fn insert_config( + &self, + config: storage::ConfigNew, + ) -> CustomResult { + self.diesel_store.insert_config(config).await + } + + async fn find_config_by_key( + &self, + key: &str, + ) -> CustomResult { + self.diesel_store.find_config_by_key(key).await + } + + async fn find_config_by_key_from_db( + &self, + key: &str, + ) -> CustomResult { + self.diesel_store.find_config_by_key_from_db(key).await + } + + async fn update_config_in_database( + &self, + key: &str, + config_update: storage::ConfigUpdate, + ) -> CustomResult { + self.diesel_store + .update_config_in_database(key, config_update) + .await + } + + async fn update_config_by_key( + &self, + key: &str, + config_update: storage::ConfigUpdate, + ) -> CustomResult { + self.diesel_store + .update_config_by_key(key, config_update) + .await + } + + async fn delete_config_by_key(&self, key: &str) -> CustomResult { + self.diesel_store.delete_config_by_key(key).await + } + + async fn find_config_by_key_unwrap_or( + &self, + key: &str, + default_config: Option, + ) -> CustomResult { + self.diesel_store + .find_config_by_key_unwrap_or(key, default_config) + .await + } +} + +#[async_trait::async_trait] +impl CustomerInterface for KafkaStore { + async fn delete_customer_by_customer_id_merchant_id( + &self, + customer_id: &str, + merchant_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_customer_by_customer_id_merchant_id(customer_id, merchant_id) + .await + } + + async fn find_customer_optional_by_customer_id_merchant_id( + &self, + customer_id: &str, + merchant_id: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_customer_optional_by_customer_id_merchant_id(customer_id, merchant_id, key_store) + .await + } + + async fn update_customer_by_customer_id_merchant_id( + &self, + customer_id: String, + merchant_id: String, + customer: storage::CustomerUpdate, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .update_customer_by_customer_id_merchant_id( + customer_id, + merchant_id, + customer, + key_store, + ) + .await + } + + async fn list_customers_by_merchant_id( + &self, + merchant_id: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_customers_by_merchant_id(merchant_id, key_store) + .await + } + + async fn find_customer_by_customer_id_merchant_id( + &self, + customer_id: &str, + merchant_id: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .find_customer_by_customer_id_merchant_id(customer_id, merchant_id, key_store) + .await + } + + async fn insert_customer( + &self, + customer_data: domain::Customer, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .insert_customer(customer_data, key_store) 
+ .await + } +} + +#[async_trait::async_trait] +impl DisputeInterface for KafkaStore { + async fn insert_dispute( + &self, + dispute: storage::DisputeNew, + ) -> CustomResult { + self.diesel_store.insert_dispute(dispute).await + } + + async fn find_by_merchant_id_payment_id_connector_dispute_id( + &self, + merchant_id: &str, + payment_id: &str, + connector_dispute_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_by_merchant_id_payment_id_connector_dispute_id( + merchant_id, + payment_id, + connector_dispute_id, + ) + .await + } + + async fn find_dispute_by_merchant_id_dispute_id( + &self, + merchant_id: &str, + dispute_id: &str, + ) -> CustomResult { + self.diesel_store + .find_dispute_by_merchant_id_dispute_id(merchant_id, dispute_id) + .await + } + + async fn find_disputes_by_merchant_id( + &self, + merchant_id: &str, + dispute_constraints: api_models::disputes::DisputeListConstraints, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_disputes_by_merchant_id(merchant_id, dispute_constraints) + .await + } + + async fn update_dispute( + &self, + this: storage::Dispute, + dispute: storage::DisputeUpdate, + ) -> CustomResult { + self.diesel_store.update_dispute(this, dispute).await + } + + async fn find_disputes_by_merchant_id_payment_id( + &self, + merchant_id: &str, + payment_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_disputes_by_merchant_id_payment_id(merchant_id, payment_id) + .await + } +} + +#[async_trait::async_trait] +impl EphemeralKeyInterface for KafkaStore { + async fn create_ephemeral_key( + &self, + ek: EphemeralKeyNew, + validity: i64, + ) -> CustomResult { + self.diesel_store.create_ephemeral_key(ek, validity).await + } + async fn get_ephemeral_key( + &self, + key: &str, + ) -> CustomResult { + self.diesel_store.get_ephemeral_key(key).await + } + async fn delete_ephemeral_key( + &self, + id: &str, + ) -> CustomResult { + self.diesel_store.delete_ephemeral_key(id).await + } +} + +#[async_trait::async_trait] +impl EventInterface for KafkaStore { + async fn insert_event( + &self, + event: storage::EventNew, + ) -> CustomResult { + self.diesel_store.insert_event(event).await + } + + async fn update_event( + &self, + event_id: String, + event: storage::EventUpdate, + ) -> CustomResult { + self.diesel_store.update_event(event_id, event).await + } +} + +#[async_trait::async_trait] +impl LockerMockUpInterface for KafkaStore { + async fn find_locker_by_card_id( + &self, + card_id: &str, + ) -> CustomResult { + self.diesel_store.find_locker_by_card_id(card_id).await + } + + async fn insert_locker_mock_up( + &self, + new: storage::LockerMockUpNew, + ) -> CustomResult { + self.diesel_store.insert_locker_mock_up(new).await + } + + async fn delete_locker_mock_up( + &self, + card_id: &str, + ) -> CustomResult { + self.diesel_store.delete_locker_mock_up(card_id).await + } +} + +#[async_trait::async_trait] +impl MandateInterface for KafkaStore { + async fn find_mandate_by_merchant_id_mandate_id( + &self, + merchant_id: &str, + mandate_id: &str, + ) -> CustomResult { + self.diesel_store + .find_mandate_by_merchant_id_mandate_id(merchant_id, mandate_id) + .await + } + + async fn find_mandate_by_merchant_id_connector_mandate_id( + &self, + merchant_id: &str, + connector_mandate_id: &str, + ) -> CustomResult { + self.diesel_store + .find_mandate_by_merchant_id_connector_mandate_id(merchant_id, connector_mandate_id) + .await + } + + async fn find_mandate_by_merchant_id_customer_id( + &self, + 
merchant_id: &str, + customer_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_mandate_by_merchant_id_customer_id(merchant_id, customer_id) + .await + } + + async fn update_mandate_by_merchant_id_mandate_id( + &self, + merchant_id: &str, + mandate_id: &str, + mandate: storage::MandateUpdate, + ) -> CustomResult { + self.diesel_store + .update_mandate_by_merchant_id_mandate_id(merchant_id, mandate_id, mandate) + .await + } + + async fn find_mandates_by_merchant_id( + &self, + merchant_id: &str, + mandate_constraints: api_models::mandates::MandateListConstraints, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_mandates_by_merchant_id(merchant_id, mandate_constraints) + .await + } + + async fn insert_mandate( + &self, + mandate: storage::MandateNew, + ) -> CustomResult { + self.diesel_store.insert_mandate(mandate).await + } +} + +#[async_trait::async_trait] +impl PaymentLinkInterface for KafkaStore { + async fn find_payment_link_by_payment_link_id( + &self, + payment_link_id: &str, + ) -> CustomResult { + self.diesel_store + .find_payment_link_by_payment_link_id(payment_link_id) + .await + } + + async fn insert_payment_link( + &self, + payment_link_object: storage::PaymentLinkNew, + ) -> CustomResult { + self.diesel_store + .insert_payment_link(payment_link_object) + .await + } + + async fn list_payment_link_by_merchant_id( + &self, + merchant_id: &str, + payment_link_constraints: api_models::payments::PaymentLinkListConstraints, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_payment_link_by_merchant_id(merchant_id, payment_link_constraints) + .await + } +} + +#[async_trait::async_trait] +impl MerchantAccountInterface for KafkaStore { + async fn insert_merchant( + &self, + merchant_account: domain::MerchantAccount, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .insert_merchant(merchant_account, key_store) + .await + } + + async fn find_merchant_account_by_merchant_id( + &self, + merchant_id: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .find_merchant_account_by_merchant_id(merchant_id, key_store) + .await + } + + async fn update_merchant( + &self, + this: domain::MerchantAccount, + merchant_account: storage::MerchantAccountUpdate, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .update_merchant(this, merchant_account, key_store) + .await + } + + async fn update_specific_fields_in_merchant( + &self, + merchant_id: &str, + merchant_account: storage::MerchantAccountUpdate, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .update_specific_fields_in_merchant(merchant_id, merchant_account, key_store) + .await + } + + async fn find_merchant_account_by_publishable_key( + &self, + publishable_key: &str, + ) -> CustomResult { + self.diesel_store + .find_merchant_account_by_publishable_key(publishable_key) + .await + } + + #[cfg(feature = "olap")] + async fn list_merchant_accounts_by_organization_id( + &self, + organization_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_merchant_accounts_by_organization_id(organization_id) + .await + } + + async fn delete_merchant_account_by_merchant_id( + &self, + merchant_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_merchant_account_by_merchant_id(merchant_id) + .await + } +} + +#[async_trait::async_trait] +impl ConnectorAccessToken for KafkaStore { + async fn get_access_token( + 
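
`ConnectorAccessToken` is a small get/set cache keyed by `(merchant_id, connector_name)`, and `KafkaStore` forwards both operations to the underlying store untouched. A synchronous in-memory analogue of the interface (the real trait is async, and where the token actually lives is up to the store):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

#[derive(Clone)]
struct AccessToken {
    token: String,
    expires: i64,
}

/// Tokens are cached per (merchant_id, connector_name) pair.
struct TokenCache {
    inner: Mutex<HashMap<(String, String), AccessToken>>,
}

impl TokenCache {
    fn get_access_token(&self, merchant_id: &str, connector_name: &str) -> Option<AccessToken> {
        self.inner
            .lock()
            .unwrap()
            .get(&(merchant_id.to_owned(), connector_name.to_owned()))
            .cloned()
    }

    fn set_access_token(&self, merchant_id: &str, connector_name: &str, token: AccessToken) {
        self.inner
            .lock()
            .unwrap()
            .insert((merchant_id.to_owned(), connector_name.to_owned()), token);
    }
}
```
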
&self, + merchant_id: &str, + connector_name: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .get_access_token(merchant_id, connector_name) + .await + } + + async fn set_access_token( + &self, + merchant_id: &str, + connector_name: &str, + access_token: AccessToken, + ) -> CustomResult<(), errors::StorageError> { + self.diesel_store + .set_access_token(merchant_id, connector_name, access_token) + .await + } +} + +#[async_trait::async_trait] +impl FileMetadataInterface for KafkaStore { + async fn insert_file_metadata( + &self, + file: storage::FileMetadataNew, + ) -> CustomResult { + self.diesel_store.insert_file_metadata(file).await + } + + async fn find_file_metadata_by_merchant_id_file_id( + &self, + merchant_id: &str, + file_id: &str, + ) -> CustomResult { + self.diesel_store + .find_file_metadata_by_merchant_id_file_id(merchant_id, file_id) + .await + } + + async fn delete_file_metadata_by_merchant_id_file_id( + &self, + merchant_id: &str, + file_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_file_metadata_by_merchant_id_file_id(merchant_id, file_id) + .await + } + + async fn update_file_metadata( + &self, + this: storage::FileMetadata, + file_metadata: storage::FileMetadataUpdate, + ) -> CustomResult { + self.diesel_store + .update_file_metadata(this, file_metadata) + .await + } +} + +#[async_trait::async_trait] +impl MerchantConnectorAccountInterface for KafkaStore { + async fn find_merchant_connector_account_by_merchant_id_connector_label( + &self, + merchant_id: &str, + connector: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .find_merchant_connector_account_by_merchant_id_connector_label( + merchant_id, + connector, + key_store, + ) + .await + } + + async fn find_merchant_connector_account_by_merchant_id_connector_name( + &self, + merchant_id: &str, + connector_name: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_merchant_connector_account_by_merchant_id_connector_name( + merchant_id, + connector_name, + key_store, + ) + .await + } + + async fn find_merchant_connector_account_by_profile_id_connector_name( + &self, + profile_id: &str, + connector_name: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .find_merchant_connector_account_by_profile_id_connector_name( + profile_id, + connector_name, + key_store, + ) + .await + } + + async fn insert_merchant_connector_account( + &self, + t: domain::MerchantConnectorAccount, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .insert_merchant_connector_account(t, key_store) + .await + } + + async fn find_by_merchant_connector_account_merchant_id_merchant_connector_id( + &self, + merchant_id: &str, + merchant_connector_id: &str, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .find_by_merchant_connector_account_merchant_id_merchant_connector_id( + merchant_id, + merchant_connector_id, + key_store, + ) + .await + } + + async fn find_merchant_connector_account_by_merchant_id_and_disabled_list( + &self, + merchant_id: &str, + get_disabled: bool, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_merchant_connector_account_by_merchant_id_and_disabled_list( + merchant_id, + get_disabled, + key_store, + ) + .await + } + + async fn update_merchant_connector_account( + &self, + this: domain::MerchantConnectorAccount, + 
merchant_connector_account: storage::MerchantConnectorAccountUpdateInternal, + key_store: &domain::MerchantKeyStore, + ) -> CustomResult { + self.diesel_store + .update_merchant_connector_account(this, merchant_connector_account, key_store) + .await + } + + async fn delete_merchant_connector_account_by_merchant_id_merchant_connector_id( + &self, + merchant_id: &str, + merchant_connector_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_merchant_connector_account_by_merchant_id_merchant_connector_id( + merchant_id, + merchant_connector_id, + ) + .await + } +} + +#[async_trait::async_trait] +impl QueueInterface for KafkaStore { + async fn fetch_consumer_tasks( + &self, + stream_name: &str, + group_name: &str, + consumer_name: &str, + ) -> CustomResult, ProcessTrackerError> { + self.diesel_store + .fetch_consumer_tasks(stream_name, group_name, consumer_name) + .await + } + + async fn consumer_group_create( + &self, + stream: &str, + group: &str, + id: &RedisEntryId, + ) -> CustomResult<(), RedisError> { + self.diesel_store + .consumer_group_create(stream, group, id) + .await + } + + async fn acquire_pt_lock( + &self, + tag: &str, + lock_key: &str, + lock_val: &str, + ttl: i64, + ) -> CustomResult { + self.diesel_store + .acquire_pt_lock(tag, lock_key, lock_val, ttl) + .await + } + + async fn release_pt_lock(&self, tag: &str, lock_key: &str) -> CustomResult { + self.diesel_store.release_pt_lock(tag, lock_key).await + } + + async fn stream_append_entry( + &self, + stream: &str, + entry_id: &RedisEntryId, + fields: Vec<(&str, String)>, + ) -> CustomResult<(), RedisError> { + self.diesel_store + .stream_append_entry(stream, entry_id, fields) + .await + } + + async fn get_key(&self, key: &str) -> CustomResult, RedisError> { + self.diesel_store.get_key(key).await + } +} + +#[async_trait::async_trait] +impl PaymentAttemptInterface for KafkaStore { + async fn insert_payment_attempt( + &self, + payment_attempt: storage::PaymentAttemptNew, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + let attempt = self + .diesel_store + .insert_payment_attempt(payment_attempt, storage_scheme) + .await?; + + if let Err(er) = self + .kafka_producer + .log_payment_attempt(&attempt, None) + .await + { + logger::error!(message="Failed to log analytics event for payment attempt {attempt:?}", error_message=?er) + } + + Ok(attempt) + } + + async fn update_payment_attempt_with_attempt_id( + &self, + this: storage::PaymentAttempt, + payment_attempt: storage::PaymentAttemptUpdate, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + let attempt = self + .diesel_store + .update_payment_attempt_with_attempt_id(this.clone(), payment_attempt, storage_scheme) + .await?; + + if let Err(er) = self + .kafka_producer + .log_payment_attempt(&attempt, Some(this)) + .await + { + logger::error!(message="Failed to log analytics event for payment attempt {attempt:?}", error_message=?er) + } + + Ok(attempt) + } + + async fn find_payment_attempt_by_connector_transaction_id_payment_id_merchant_id( + &self, + connector_transaction_id: &str, + payment_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_attempt_by_connector_transaction_id_payment_id_merchant_id( + connector_transaction_id, + payment_id, + merchant_id, + storage_scheme, + ) + .await + } + + async fn find_payment_attempt_by_merchant_id_connector_txn_id( + &self, + merchant_id: &str, + connector_txn_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> 
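
Note the error handling around `log_payment_attempt` above: a Kafka failure is logged and swallowed, so analytics can never fail the payment write itself. The same shape, reduced to a generic helper (`insert_with_best_effort_logging` is a hypothetical name, not a hyperswitch API):

```rust
/// Run the primary write; on success, attempt to log an analytics event,
/// but never let a logging failure change the outcome.
async fn insert_with_best_effort_logging<T, E>(
    primary_write: impl std::future::Future<Output = Result<T, E>>,
    log_event: impl FnOnce(&T) -> Result<(), String>,
) -> Result<T, E> {
    // The database write alone decides success or failure...
    let value = primary_write.await?;
    // ...while the analytics emit is strictly best-effort.
    if let Err(err) = log_event(&value) {
        eprintln!("failed to log analytics event: {err}");
    }
    Ok(value)
}
```
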
CustomResult { + self.diesel_store + .find_payment_attempt_by_merchant_id_connector_txn_id( + merchant_id, + connector_txn_id, + storage_scheme, + ) + .await + } + + async fn find_payment_attempt_by_payment_id_merchant_id_attempt_id( + &self, + payment_id: &str, + merchant_id: &str, + attempt_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_attempt_by_payment_id_merchant_id_attempt_id( + payment_id, + merchant_id, + attempt_id, + storage_scheme, + ) + .await + } + + async fn find_payment_attempt_by_attempt_id_merchant_id( + &self, + attempt_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_attempt_by_attempt_id_merchant_id(attempt_id, merchant_id, storage_scheme) + .await + } + + async fn find_payment_attempt_last_successful_attempt_by_payment_id_merchant_id( + &self, + payment_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_attempt_last_successful_attempt_by_payment_id_merchant_id( + payment_id, + merchant_id, + storage_scheme, + ) + .await + } + + async fn find_payment_attempt_last_successful_or_partially_captured_attempt_by_payment_id_merchant_id( + &self, + payment_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_attempt_last_successful_or_partially_captured_attempt_by_payment_id_merchant_id( + payment_id, + merchant_id, + storage_scheme, + ) + .await + } + + async fn find_payment_attempt_by_preprocessing_id_merchant_id( + &self, + preprocessing_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_attempt_by_preprocessing_id_merchant_id( + preprocessing_id, + merchant_id, + storage_scheme, + ) + .await + } + + async fn get_filters_for_payments( + &self, + pi: &[data_models::payments::PaymentIntent], + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult< + data_models::payments::payment_attempt::PaymentListFilters, + errors::DataStorageError, + > { + self.diesel_store + .get_filters_for_payments(pi, merchant_id, storage_scheme) + .await + } + + async fn get_total_count_of_filtered_payment_attempts( + &self, + merchant_id: &str, + active_attempt_ids: &[String], + connector: Option>, + payment_method: Option>, + payment_method_type: Option>, + authentication_type: Option>, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .get_total_count_of_filtered_payment_attempts( + merchant_id, + active_attempt_ids, + connector, + payment_method, + payment_method_type, + authentication_type, + storage_scheme, + ) + .await + } + + async fn find_attempts_by_merchant_id_payment_id( + &self, + merchant_id: &str, + payment_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::DataStorageError> { + self.diesel_store + .find_attempts_by_merchant_id_payment_id(merchant_id, payment_id, storage_scheme) + .await + } +} + +#[async_trait::async_trait] +impl PaymentIntentInterface for KafkaStore { + async fn update_payment_intent( + &self, + this: storage::PaymentIntent, + payment_intent: storage::PaymentIntentUpdate, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + let intent = self + .diesel_store + .update_payment_intent(this.clone(), payment_intent, storage_scheme) + .await?; + + if let Err(er) = self + .kafka_producer + 
.log_payment_intent(&intent, Some(this)) + .await + { + logger::error!(message="Failed to add analytics entry for Payment Intent {intent:?}", error_message=?er); + }; + + Ok(intent) + } + + async fn insert_payment_intent( + &self, + new: storage::PaymentIntentNew, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + logger::debug!("Inserting PaymentIntent Via KafkaStore"); + let intent = self + .diesel_store + .insert_payment_intent(new, storage_scheme) + .await?; + + if let Err(er) = self.kafka_producer.log_payment_intent(&intent, None).await { + logger::error!(message="Failed to add analytics entry for Payment Intent {intent:?}", error_message=?er); + }; + + Ok(intent) + } + + async fn find_payment_intent_by_payment_id_merchant_id( + &self, + payment_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_payment_intent_by_payment_id_merchant_id(payment_id, merchant_id, storage_scheme) + .await + } + + #[cfg(feature = "olap")] + async fn filter_payment_intent_by_constraints( + &self, + merchant_id: &str, + filters: &data_models::payments::payment_intent::PaymentIntentFetchConstraints, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::DataStorageError> { + self.diesel_store + .filter_payment_intent_by_constraints(merchant_id, filters, storage_scheme) + .await + } + + #[cfg(feature = "olap")] + async fn filter_payment_intents_by_time_range_constraints( + &self, + merchant_id: &str, + time_range: &api_models::payments::TimeRange, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::DataStorageError> { + self.diesel_store + .filter_payment_intents_by_time_range_constraints( + merchant_id, + time_range, + storage_scheme, + ) + .await + } + + #[cfg(feature = "olap")] + async fn get_filtered_payment_intents_attempt( + &self, + merchant_id: &str, + constraints: &data_models::payments::payment_intent::PaymentIntentFetchConstraints, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult< + Vec<( + data_models::payments::PaymentIntent, + data_models::payments::payment_attempt::PaymentAttempt, + )>, + errors::DataStorageError, + > { + self.diesel_store + .get_filtered_payment_intents_attempt(merchant_id, constraints, storage_scheme) + .await + } + + #[cfg(feature = "olap")] + async fn get_filtered_active_attempt_ids_for_total_count( + &self, + merchant_id: &str, + constraints: &data_models::payments::payment_intent::PaymentIntentFetchConstraints, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::DataStorageError> { + self.diesel_store + .get_filtered_active_attempt_ids_for_total_count( + merchant_id, + constraints, + storage_scheme, + ) + .await + } + + async fn get_active_payment_attempt( + &self, + payment: &mut storage::PaymentIntent, + storage_scheme: MerchantStorageScheme, + ) -> error_stack::Result { + self.diesel_store + .get_active_payment_attempt(payment, storage_scheme) + .await + } +} + +#[async_trait::async_trait] +impl PaymentMethodInterface for KafkaStore { + async fn find_payment_method( + &self, + payment_method_id: &str, + ) -> CustomResult { + self.diesel_store + .find_payment_method(payment_method_id) + .await + } + + async fn find_payment_method_by_customer_id_merchant_id_list( + &self, + customer_id: &str, + merchant_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_payment_method_by_customer_id_merchant_id_list(customer_id, merchant_id) + .await + } + + async fn insert_payment_method( + &self, + m: 
storage::PaymentMethodNew, + ) -> CustomResult { + self.diesel_store.insert_payment_method(m).await + } + + async fn update_payment_method( + &self, + payment_method: storage::PaymentMethod, + payment_method_update: storage::PaymentMethodUpdate, + ) -> CustomResult { + self.diesel_store + .update_payment_method(payment_method, payment_method_update) + .await + } + + async fn delete_payment_method_by_merchant_id_payment_method_id( + &self, + merchant_id: &str, + payment_method_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_payment_method_by_merchant_id_payment_method_id(merchant_id, payment_method_id) + .await + } +} + +#[async_trait::async_trait] +impl PayoutAttemptInterface for KafkaStore { + async fn find_payout_attempt_by_merchant_id_payout_id( + &self, + merchant_id: &str, + payout_id: &str, + ) -> CustomResult { + self.diesel_store + .find_payout_attempt_by_merchant_id_payout_id(merchant_id, payout_id) + .await + } + + async fn update_payout_attempt_by_merchant_id_payout_id( + &self, + merchant_id: &str, + payout_id: &str, + payout: storage::PayoutAttemptUpdate, + ) -> CustomResult { + self.diesel_store + .update_payout_attempt_by_merchant_id_payout_id(merchant_id, payout_id, payout) + .await + } + + async fn insert_payout_attempt( + &self, + payout: storage::PayoutAttemptNew, + ) -> CustomResult { + self.diesel_store.insert_payout_attempt(payout).await + } +} + +#[async_trait::async_trait] +impl PayoutsInterface for KafkaStore { + async fn find_payout_by_merchant_id_payout_id( + &self, + merchant_id: &str, + payout_id: &str, + ) -> CustomResult { + self.diesel_store + .find_payout_by_merchant_id_payout_id(merchant_id, payout_id) + .await + } + + async fn update_payout_by_merchant_id_payout_id( + &self, + merchant_id: &str, + payout_id: &str, + payout: storage::PayoutsUpdate, + ) -> CustomResult { + self.diesel_store + .update_payout_by_merchant_id_payout_id(merchant_id, payout_id, payout) + .await + } + + async fn insert_payout( + &self, + payout: storage::PayoutsNew, + ) -> CustomResult { + self.diesel_store.insert_payout(payout).await + } +} + +#[async_trait::async_trait] +impl ProcessTrackerInterface for KafkaStore { + async fn reinitialize_limbo_processes( + &self, + ids: Vec, + schedule_time: PrimitiveDateTime, + ) -> CustomResult { + self.diesel_store + .reinitialize_limbo_processes(ids, schedule_time) + .await + } + + async fn find_process_by_id( + &self, + id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store.find_process_by_id(id).await + } + + async fn update_process( + &self, + this: storage::ProcessTracker, + process: storage::ProcessTrackerUpdate, + ) -> CustomResult { + self.diesel_store.update_process(this, process).await + } + + async fn process_tracker_update_process_status_by_ids( + &self, + task_ids: Vec, + task_update: storage::ProcessTrackerUpdate, + ) -> CustomResult { + self.diesel_store + .process_tracker_update_process_status_by_ids(task_ids, task_update) + .await + } + async fn update_process_tracker( + &self, + this: storage::ProcessTracker, + process: storage::ProcessTrackerUpdate, + ) -> CustomResult { + self.diesel_store + .update_process_tracker(this, process) + .await + } + + async fn insert_process( + &self, + new: storage::ProcessTrackerNew, + ) -> CustomResult { + self.diesel_store.insert_process(new).await + } + + async fn find_processes_by_time_status( + &self, + time_lower_limit: PrimitiveDateTime, + time_upper_limit: PrimitiveDateTime, + status: ProcessTrackerStatus, + limit: Option, + ) -> CustomResult, 
errors::StorageError> { + self.diesel_store + .find_processes_by_time_status(time_lower_limit, time_upper_limit, status, limit) + .await + } +} + +#[async_trait::async_trait] +impl CaptureInterface for KafkaStore { + async fn insert_capture( + &self, + capture: storage::CaptureNew, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .insert_capture(capture, storage_scheme) + .await + } + + async fn update_capture_with_capture_id( + &self, + this: storage::Capture, + capture: storage::CaptureUpdate, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .update_capture_with_capture_id(this, capture, storage_scheme) + .await + } + + async fn find_all_captures_by_merchant_id_payment_id_authorized_attempt_id( + &self, + merchant_id: &str, + payment_id: &str, + authorized_attempt_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_all_captures_by_merchant_id_payment_id_authorized_attempt_id( + merchant_id, + payment_id, + authorized_attempt_id, + storage_scheme, + ) + .await + } +} + +#[async_trait::async_trait] +impl RefundInterface for KafkaStore { + async fn find_refund_by_internal_reference_id_merchant_id( + &self, + internal_reference_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_refund_by_internal_reference_id_merchant_id( + internal_reference_id, + merchant_id, + storage_scheme, + ) + .await + } + + async fn find_refund_by_payment_id_merchant_id( + &self, + payment_id: &str, + merchant_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_refund_by_payment_id_merchant_id(payment_id, merchant_id, storage_scheme) + .await + } + + async fn find_refund_by_merchant_id_refund_id( + &self, + merchant_id: &str, + refund_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_refund_by_merchant_id_refund_id(merchant_id, refund_id, storage_scheme) + .await + } + + async fn find_refund_by_merchant_id_connector_refund_id_connector( + &self, + merchant_id: &str, + connector_refund_id: &str, + connector: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .find_refund_by_merchant_id_connector_refund_id_connector( + merchant_id, + connector_refund_id, + connector, + storage_scheme, + ) + .await + } + + async fn update_refund( + &self, + this: storage::Refund, + refund: storage::RefundUpdate, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + let refund = self + .diesel_store + .update_refund(this.clone(), refund, storage_scheme) + .await?; + + if let Err(er) = self.kafka_producer.log_refund(&refund, Some(this)).await { + logger::error!(message="Failed to insert analytics event for Refund Update {refund?}", error_message=?er); + } + Ok(refund) + } + + async fn find_refund_by_merchant_id_connector_transaction_id( + &self, + merchant_id: &str, + connector_transaction_id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_refund_by_merchant_id_connector_transaction_id( + merchant_id, + connector_transaction_id, + storage_scheme, + ) + .await + } + + async fn insert_refund( + &self, + new: storage::RefundNew, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + let refund = self.diesel_store.insert_refund(new, storage_scheme).await?; + + if let Err(er) = 
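
`log_refund` receives both the new row and, on updates, the pre-update row (`Some(this)`), so the analytics pipeline can compute state transitions rather than just snapshots; creations pass `None`. A sketch of that old/new event shape with placeholder types:

```rust
#[derive(Debug, Clone)]
struct Refund {
    refund_id: String,
    status: String,
}

#[derive(Debug)]
struct RefundEvent {
    new: Refund,
    // `None` for freshly created refunds; `Some` carries the pre-update row
    // so consumers can diff old state against new state.
    old: Option<Refund>,
}

fn refund_event(new: &Refund, old: Option<Refund>) -> RefundEvent {
    RefundEvent {
        new: new.clone(),
        old,
    }
}
```
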
self.kafka_producer.log_refund(&refund, None).await { + logger::error!(message="Failed to insert analytics event for Refund Create {refund?}", error_message=?er); + } + Ok(refund) + } + + #[cfg(feature = "olap")] + async fn filter_refund_by_constraints( + &self, + merchant_id: &str, + refund_details: &api_models::refunds::RefundListRequest, + storage_scheme: MerchantStorageScheme, + limit: i64, + offset: i64, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .filter_refund_by_constraints( + merchant_id, + refund_details, + storage_scheme, + limit, + offset, + ) + .await + } + + #[cfg(feature = "olap")] + async fn filter_refund_by_meta_constraints( + &self, + merchant_id: &str, + refund_details: &api_models::payments::TimeRange, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .filter_refund_by_meta_constraints(merchant_id, refund_details, storage_scheme) + .await + } + + #[cfg(feature = "olap")] + async fn get_total_count_of_refunds( + &self, + merchant_id: &str, + refund_details: &api_models::refunds::RefundListRequest, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .get_total_count_of_refunds(merchant_id, refund_details, storage_scheme) + .await + } +} + +#[async_trait::async_trait] +impl MerchantKeyStoreInterface for KafkaStore { + async fn insert_merchant_key_store( + &self, + merchant_key_store: domain::MerchantKeyStore, + key: &Secret>, + ) -> CustomResult { + self.diesel_store + .insert_merchant_key_store(merchant_key_store, key) + .await + } + + async fn get_merchant_key_store_by_merchant_id( + &self, + merchant_id: &str, + key: &Secret>, + ) -> CustomResult { + self.diesel_store + .get_merchant_key_store_by_merchant_id(merchant_id, key) + .await + } + + async fn delete_merchant_key_store_by_merchant_id( + &self, + merchant_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_merchant_key_store_by_merchant_id(merchant_id) + .await + } +} + +#[async_trait::async_trait] +impl BusinessProfileInterface for KafkaStore { + async fn insert_business_profile( + &self, + business_profile: business_profile::BusinessProfileNew, + ) -> CustomResult { + self.diesel_store + .insert_business_profile(business_profile) + .await + } + + async fn find_business_profile_by_profile_id( + &self, + profile_id: &str, + ) -> CustomResult { + self.diesel_store + .find_business_profile_by_profile_id(profile_id) + .await + } + + async fn update_business_profile_by_profile_id( + &self, + current_state: business_profile::BusinessProfile, + business_profile_update: business_profile::BusinessProfileUpdateInternal, + ) -> CustomResult { + self.diesel_store + .update_business_profile_by_profile_id(current_state, business_profile_update) + .await + } + + async fn delete_business_profile_by_profile_id_merchant_id( + &self, + profile_id: &str, + merchant_id: &str, + ) -> CustomResult { + self.diesel_store + .delete_business_profile_by_profile_id_merchant_id(profile_id, merchant_id) + .await + } + + async fn list_business_profile_by_merchant_id( + &self, + merchant_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_business_profile_by_merchant_id(merchant_id) + .await + } + + async fn find_business_profile_by_profile_name_merchant_id( + &self, + profile_name: &str, + merchant_id: &str, + ) -> CustomResult { + self.diesel_store + .find_business_profile_by_profile_name_merchant_id(profile_name, merchant_id) + .await + } +} + +#[async_trait::async_trait] +impl ReverseLookupInterface for 
KafkaStore { + async fn insert_reverse_lookup( + &self, + new: ReverseLookupNew, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .insert_reverse_lookup(new, storage_scheme) + .await + } + + async fn get_lookup_by_lookup_id( + &self, + id: &str, + storage_scheme: MerchantStorageScheme, + ) -> CustomResult { + self.diesel_store + .get_lookup_by_lookup_id(id, storage_scheme) + .await + } +} + +#[async_trait::async_trait] +impl RoutingAlgorithmInterface for KafkaStore { + async fn insert_routing_algorithm( + &self, + routing_algorithm: storage::RoutingAlgorithm, + ) -> CustomResult { + self.diesel_store + .insert_routing_algorithm(routing_algorithm) + .await + } + + async fn find_routing_algorithm_by_profile_id_algorithm_id( + &self, + profile_id: &str, + algorithm_id: &str, + ) -> CustomResult { + self.diesel_store + .find_routing_algorithm_by_profile_id_algorithm_id(profile_id, algorithm_id) + .await + } + + async fn find_routing_algorithm_by_algorithm_id_merchant_id( + &self, + algorithm_id: &str, + merchant_id: &str, + ) -> CustomResult { + self.diesel_store + .find_routing_algorithm_by_algorithm_id_merchant_id(algorithm_id, merchant_id) + .await + } + + async fn find_routing_algorithm_metadata_by_algorithm_id_profile_id( + &self, + algorithm_id: &str, + profile_id: &str, + ) -> CustomResult { + self.diesel_store + .find_routing_algorithm_metadata_by_algorithm_id_profile_id(algorithm_id, profile_id) + .await + } + + async fn list_routing_algorithm_metadata_by_profile_id( + &self, + profile_id: &str, + limit: i64, + offset: i64, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_routing_algorithm_metadata_by_profile_id(profile_id, limit, offset) + .await + } + + async fn list_routing_algorithm_metadata_by_merchant_id( + &self, + merchant_id: &str, + limit: i64, + offset: i64, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .list_routing_algorithm_metadata_by_merchant_id(merchant_id, limit, offset) + .await + } +} + +#[async_trait::async_trait] +impl GsmInterface for KafkaStore { + async fn add_gsm_rule( + &self, + rule: storage::GatewayStatusMappingNew, + ) -> CustomResult { + self.diesel_store.add_gsm_rule(rule).await + } + + async fn find_gsm_decision( + &self, + connector: String, + flow: String, + sub_flow: String, + code: String, + message: String, + ) -> CustomResult { + self.diesel_store + .find_gsm_decision(connector, flow, sub_flow, code, message) + .await + } + + async fn find_gsm_rule( + &self, + connector: String, + flow: String, + sub_flow: String, + code: String, + message: String, + ) -> CustomResult { + self.diesel_store + .find_gsm_rule(connector, flow, sub_flow, code, message) + .await + } + + async fn update_gsm_rule( + &self, + connector: String, + flow: String, + sub_flow: String, + code: String, + message: String, + data: storage::GatewayStatusMappingUpdate, + ) -> CustomResult { + self.diesel_store + .update_gsm_rule(connector, flow, sub_flow, code, message, data) + .await + } + + async fn delete_gsm_rule( + &self, + connector: String, + flow: String, + sub_flow: String, + code: String, + message: String, + ) -> CustomResult { + self.diesel_store + .delete_gsm_rule(connector, flow, sub_flow, code, message) + .await + } +} + +#[async_trait::async_trait] +impl StorageInterface for KafkaStore { + fn get_scheduler_db(&self) -> Box { + Box::new(self.clone()) + } +} + +#[async_trait::async_trait] +impl SchedulerInterface for KafkaStore {} + +impl MasterKeyInterface for KafkaStore { + fn 
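
The `GsmInterface` methods above address gateway status mapping rules by a five-part key: connector, flow, sub_flow, code, and message. A hedged sketch of that keyed lookup, with a `HashMap` standing in for the Diesel-backed table and `"retry"` as an invented decision value:

```rust
use std::collections::HashMap;

// (connector, flow, sub_flow, code, message): the five-part key used by
// GsmInterface above. A HashMap stands in for the Diesel-backed table.
type GsmKey = (String, String, String, String, String);

fn gsm_key(connector: &str, flow: &str, sub_flow: &str, code: &str, message: &str) -> GsmKey {
    (connector.into(), flow.into(), sub_flow.into(), code.into(), message.into())
}

fn main() {
    let mut rules: HashMap<GsmKey, &str> = HashMap::new();
    // "retry" is an invented decision value for illustration.
    rules.insert(
        gsm_key("stripe", "Authorize", "sub_flow", "card_declined", "Your card was declined."),
        "retry",
    );

    // find_gsm_decision-style lookup: all five parts must match exactly.
    let decision = rules.get(&gsm_key(
        "stripe", "Authorize", "sub_flow", "card_declined", "Your card was declined.",
    ));
    match decision {
        Some(d) => println!("gsm decision: {d}"),
        None => println!("no gsm rule; fall back to the connector's status"),
    }
}
```
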
get_master_key(&self) -> &[u8] { + self.diesel_store.get_master_key() + } +} +#[async_trait::async_trait] +impl UserInterface for KafkaStore { + async fn insert_user( + &self, + user_data: storage::UserNew, + ) -> CustomResult { + self.diesel_store.insert_user(user_data).await + } + + async fn find_user_by_email( + &self, + user_email: &str, + ) -> CustomResult { + self.diesel_store.find_user_by_email(user_email).await + } + + async fn find_user_by_id( + &self, + user_id: &str, + ) -> CustomResult { + self.diesel_store.find_user_by_id(user_id).await + } + + async fn update_user_by_user_id( + &self, + user_id: &str, + user: storage::UserUpdate, + ) -> CustomResult { + self.diesel_store + .update_user_by_user_id(user_id, user) + .await + } + + async fn delete_user_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + self.diesel_store.delete_user_by_user_id(user_id).await + } + + async fn find_users_and_roles_by_merchant_id( + &self, + merchant_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_users_and_roles_by_merchant_id(merchant_id) + .await + } +} + +impl RedisConnInterface for KafkaStore { + fn get_redis_conn(&self) -> CustomResult, RedisError> { + self.diesel_store.get_redis_conn() + } +} + +#[async_trait::async_trait] +impl UserRoleInterface for KafkaStore { + async fn insert_user_role( + &self, + user_role: user_storage::UserRoleNew, + ) -> CustomResult { + self.diesel_store.insert_user_role(user_role).await + } + async fn find_user_role_by_user_id( + &self, + user_id: &str, + ) -> CustomResult { + self.diesel_store.find_user_role_by_user_id(user_id).await + } + async fn update_user_role_by_user_id_merchant_id( + &self, + user_id: &str, + merchant_id: &str, + update: user_storage::UserRoleUpdate, + ) -> CustomResult { + self.diesel_store + .update_user_role_by_user_id_merchant_id(user_id, merchant_id, update) + .await + } + async fn delete_user_role(&self, user_id: &str) -> CustomResult { + self.diesel_store.delete_user_role(user_id).await + } + async fn list_user_roles_by_user_id( + &self, + user_id: &str, + ) -> CustomResult, errors::StorageError> { + self.diesel_store.list_user_roles_by_user_id(user_id).await + } +} + +#[async_trait::async_trait] +impl DashboardMetadataInterface for KafkaStore { + async fn insert_metadata( + &self, + metadata: storage::DashboardMetadataNew, + ) -> CustomResult { + self.diesel_store.insert_metadata(metadata).await + } + + async fn update_metadata( + &self, + user_id: Option, + merchant_id: String, + org_id: String, + data_key: enums::DashboardMetadata, + dashboard_metadata_update: storage::DashboardMetadataUpdate, + ) -> CustomResult { + self.diesel_store + .update_metadata( + user_id, + merchant_id, + org_id, + data_key, + dashboard_metadata_update, + ) + .await + } + + async fn find_user_scoped_dashboard_metadata( + &self, + user_id: &str, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_user_scoped_dashboard_metadata(user_id, merchant_id, org_id, data_keys) + .await + } + async fn find_merchant_scoped_dashboard_metadata( + &self, + merchant_id: &str, + org_id: &str, + data_keys: Vec, + ) -> CustomResult, errors::StorageError> { + self.diesel_store + .find_merchant_scoped_dashboard_metadata(merchant_id, org_id, data_keys) + .await + } +} + +#[async_trait::async_trait] +impl BatchSampleDataInterface for KafkaStore { + async fn insert_payment_intents_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, 
data_models::errors::StorageError> + { + let payment_intents_list = self + .diesel_store + .insert_payment_intents_batch_for_sample_data(batch) + .await?; + + for payment_intent in payment_intents_list.iter() { + let _ = self + .kafka_producer + .log_payment_intent(payment_intent, None) + .await; + } + Ok(payment_intents_list) + } + + async fn insert_payment_attempts_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult< + Vec, + data_models::errors::StorageError, + > { + let payment_attempts_list = self + .diesel_store + .insert_payment_attempts_batch_for_sample_data(batch) + .await?; + + for payment_attempt in payment_attempts_list.iter() { + let _ = self + .kafka_producer + .log_payment_attempt(payment_attempt, None) + .await; + } + Ok(payment_attempts_list) + } + + async fn insert_refunds_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, data_models::errors::StorageError> { + let refunds_list = self + .diesel_store + .insert_refunds_batch_for_sample_data(batch) + .await?; + + for refund in refunds_list.iter() { + let _ = self.kafka_producer.log_refund(refund, None).await; + } + Ok(refunds_list) + } + + async fn delete_payment_intents_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, data_models::errors::StorageError> + { + let payment_intents_list = self + .diesel_store + .delete_payment_intents_for_sample_data(merchant_id) + .await?; + + for payment_intent in payment_intents_list.iter() { + let _ = self + .kafka_producer + .log_payment_intent_delete(payment_intent) + .await; + } + Ok(payment_intents_list) + } + + async fn delete_payment_attempts_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult< + Vec, + data_models::errors::StorageError, + > { + let payment_attempts_list = self + .diesel_store + .delete_payment_attempts_for_sample_data(merchant_id) + .await?; + + for payment_attempt in payment_attempts_list.iter() { + let _ = self + .kafka_producer + .log_payment_attempt_delete(payment_attempt) + .await; + } + + Ok(payment_attempts_list) + } + + async fn delete_refunds_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, data_models::errors::StorageError> { + let refunds_list = self + .diesel_store + .delete_refunds_for_sample_data(merchant_id) + .await?; + + for refund in refunds_list.iter() { + let _ = self.kafka_producer.log_refund_delete(refund).await; + } + + Ok(refunds_list) + } +} diff --git a/crates/router/src/db/user.rs b/crates/router/src/db/user.rs index 6bb1d9e50b6a..e3dda965f9c9 100644 --- a/crates/router/src/db/user.rs +++ b/crates/router/src/db/user.rs @@ -1,4 +1,4 @@ -use diesel_models::user as storage; +use diesel_models::{user as storage, user_role::UserRole}; use error_stack::{IntoReport, ResultExt}; use masking::Secret; @@ -8,6 +8,7 @@ use crate::{ core::errors::{self, CustomResult}, services::Store, }; +pub mod sample_data; #[async_trait::async_trait] pub trait UserInterface { @@ -36,6 +37,11 @@ pub trait UserInterface { &self, user_id: &str, ) -> CustomResult; + + async fn find_users_and_roles_by_merchant_id( + &self, + merchant_id: &str, + ) -> CustomResult, errors::StorageError>; } #[async_trait::async_trait] @@ -96,6 +102,17 @@ impl UserInterface for Store { .map_err(Into::into) .into_report() } + + async fn find_users_and_roles_by_merchant_id( + &self, + merchant_id: &str, + ) -> CustomResult, errors::StorageError> { + let conn = connection::pg_connection_write(self).await?; + storage::User::find_joined_users_and_roles_by_merchant_id(&conn, merchant_id) + .await + 
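
For the sample-data batches above, `KafkaStore` inserts the whole batch first and then emits one analytics event per record, discarding each emit result with `let _ =`. A small sketch of that fire-and-forget loop, with an `emit` stub in place of `kafka_producer`:

```rust
// Sketch of the fire-and-forget analytics loop for sample-data batches:
// the batch is returned whether or not any individual emit succeeds.
fn emit(record: &str) -> Result<(), String> {
    println!("emitted analytics event for {record}");
    Ok(())
}

fn insert_batch_with_analytics(batch: Vec<String>) -> Vec<String> {
    // Imagine the whole batch was just written to Postgres in one statement.
    for record in &batch {
        // `let _ =` discards the Result entirely, in contrast to the refund
        // paths above, which at least log the failure.
        let _ = emit(record);
    }
    batch
}

fn main() {
    let inserted = insert_batch_with_analytics(vec!["pay_1".into(), "pay_2".into()]);
    assert_eq!(inserted.len(), 2);
}
```
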
.map_err(Into::into) + .into_report() + } } #[async_trait::async_trait] @@ -221,45 +238,11 @@ impl UserInterface for MockDb { users.remove(user_index); Ok(true) } -} -#[cfg(feature = "kafka_events")] -#[async_trait::async_trait] -impl UserInterface for super::KafkaStore { - async fn insert_user( - &self, - user_data: storage::UserNew, - ) -> CustomResult { - self.diesel_store.insert_user(user_data).await - } - async fn find_user_by_email( + async fn find_users_and_roles_by_merchant_id( &self, - user_email: &str, - ) -> CustomResult { - self.diesel_store.find_user_by_email(user_email).await - } - - async fn find_user_by_id( - &self, - user_id: &str, - ) -> CustomResult { - self.diesel_store.find_user_by_id(user_id).await - } - - async fn update_user_by_user_id( - &self, - user_id: &str, - user: storage::UserUpdate, - ) -> CustomResult { - self.diesel_store - .update_user_by_user_id(user_id, user) - .await - } - - async fn delete_user_by_user_id( - &self, - user_id: &str, - ) -> CustomResult { - self.diesel_store.delete_user_by_user_id(user_id).await + _merchant_id: &str, + ) -> CustomResult, errors::StorageError> { + Err(errors::StorageError::MockDbError)? } } diff --git a/crates/router/src/db/user/sample_data.rs b/crates/router/src/db/user/sample_data.rs new file mode 100644 index 000000000000..11def9026854 --- /dev/null +++ b/crates/router/src/db/user/sample_data.rs @@ -0,0 +1,205 @@ +use data_models::{ + errors::StorageError, + payments::{payment_attempt::PaymentAttempt, payment_intent::PaymentIntentNew, PaymentIntent}, +}; +use diesel_models::{ + errors::DatabaseError, + query::user::sample_data as sample_data_queries, + refund::{Refund, RefundNew}, + user::sample_data::PaymentAttemptBatchNew, +}; +use error_stack::{Report, ResultExt}; +use storage_impl::DataModelExt; + +use crate::{connection::pg_connection_write, core::errors::CustomResult, services::Store}; + +#[async_trait::async_trait] +pub trait BatchSampleDataInterface { + async fn insert_payment_intents_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, StorageError>; + + async fn insert_payment_attempts_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, StorageError>; + + async fn insert_refunds_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, StorageError>; + + async fn delete_payment_intents_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, StorageError>; + + async fn delete_payment_attempts_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, StorageError>; + + async fn delete_refunds_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, StorageError>; +} + +#[async_trait::async_trait] +impl BatchSampleDataInterface for Store { + async fn insert_payment_intents_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, StorageError> { + let conn = pg_connection_write(self) + .await + .change_context(StorageError::DatabaseConnectionError)?; + let new_intents = batch.into_iter().map(|i| i.to_storage_model()).collect(); + sample_data_queries::insert_payment_intents(&conn, new_intents) + .await + .map_err(diesel_error_to_data_error) + .map(|v| { + v.into_iter() + .map(PaymentIntent::from_storage_model) + .collect() + }) + } + + async fn insert_payment_attempts_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, StorageError> { + let conn = pg_connection_write(self) + .await + .change_context(StorageError::DatabaseConnectionError)?; + sample_data_queries::insert_payment_attempts(&conn, batch) + .await + 
.map_err(diesel_error_to_data_error) + .map(|res| { + res.into_iter() + .map(PaymentAttempt::from_storage_model) + .collect() + }) + } + async fn insert_refunds_batch_for_sample_data( + &self, + batch: Vec, + ) -> CustomResult, StorageError> { + let conn = pg_connection_write(self) + .await + .change_context(StorageError::DatabaseConnectionError)?; + sample_data_queries::insert_refunds(&conn, batch) + .await + .map_err(diesel_error_to_data_error) + } + + async fn delete_payment_intents_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, StorageError> { + let conn = pg_connection_write(self) + .await + .change_context(StorageError::DatabaseConnectionError)?; + sample_data_queries::delete_payment_intents(&conn, merchant_id) + .await + .map_err(diesel_error_to_data_error) + .map(|v| { + v.into_iter() + .map(PaymentIntent::from_storage_model) + .collect() + }) + } + + async fn delete_payment_attempts_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, StorageError> { + let conn = pg_connection_write(self) + .await + .change_context(StorageError::DatabaseConnectionError)?; + sample_data_queries::delete_payment_attempts(&conn, merchant_id) + .await + .map_err(diesel_error_to_data_error) + .map(|res| { + res.into_iter() + .map(PaymentAttempt::from_storage_model) + .collect() + }) + } + async fn delete_refunds_for_sample_data( + &self, + merchant_id: &str, + ) -> CustomResult, StorageError> { + let conn = pg_connection_write(self) + .await + .change_context(StorageError::DatabaseConnectionError)?; + sample_data_queries::delete_refunds(&conn, merchant_id) + .await + .map_err(diesel_error_to_data_error) + } +} + +#[async_trait::async_trait] +impl BatchSampleDataInterface for storage_impl::MockDb { + async fn insert_payment_intents_batch_for_sample_data( + &self, + _batch: Vec, + ) -> CustomResult, StorageError> { + Err(StorageError::MockDbError)? + } + + async fn insert_payment_attempts_batch_for_sample_data( + &self, + _batch: Vec, + ) -> CustomResult, StorageError> { + Err(StorageError::MockDbError)? + } + + async fn insert_refunds_batch_for_sample_data( + &self, + _batch: Vec, + ) -> CustomResult, StorageError> { + Err(StorageError::MockDbError)? + } + + async fn delete_payment_intents_for_sample_data( + &self, + _merchant_id: &str, + ) -> CustomResult, StorageError> { + Err(StorageError::MockDbError)? + } + async fn delete_payment_attempts_for_sample_data( + &self, + _merchant_id: &str, + ) -> CustomResult, StorageError> { + Err(StorageError::MockDbError)? + } + async fn delete_refunds_for_sample_data( + &self, + _merchant_id: &str, + ) -> CustomResult, StorageError> { + Err(StorageError::MockDbError)? 
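
The `Store` impls above convert at the query boundary via `DataModelExt`: domain values go through `to_storage_model()` before the Diesel query and come back through `from_storage_model`. A self-contained sketch of that round trip, using an illustrative two-field model rather than the real `PaymentIntent` schema:

```rust
// Illustrative two-field model; the real trait lives in storage_impl and is
// implemented for types such as PaymentIntent with far more fields.
struct PaymentIntent { payment_id: String, amount: i64 }
struct DieselPaymentIntent { payment_id: String, amount: i64 }

trait DataModelExt {
    type StorageModel;
    fn to_storage_model(self) -> Self::StorageModel;
    fn from_storage_model(storage_model: Self::StorageModel) -> Self;
}

impl DataModelExt for PaymentIntent {
    type StorageModel = DieselPaymentIntent;
    fn to_storage_model(self) -> DieselPaymentIntent {
        // Domain type -> Diesel row type, done just before the query.
        DieselPaymentIntent { payment_id: self.payment_id, amount: self.amount }
    }
    fn from_storage_model(row: DieselPaymentIntent) -> Self {
        // Diesel row type -> domain type, done on every value the query returns.
        Self { payment_id: row.payment_id, amount: row.amount }
    }
}

fn main() {
    let intent = PaymentIntent { payment_id: "pay_1".into(), amount: 100 };
    let row = intent.to_storage_model();
    let back = PaymentIntent::from_storage_model(row);
    assert_eq!(back.payment_id, "pay_1");
    assert_eq!(back.amount, 100);
}
```
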
+ } +} + +// TODO: This error conversion is re-used from storage_impl and is not DRY when it should be +// Ideally the impl's here should be defined in that crate avoiding this re-definition +fn diesel_error_to_data_error(diesel_error: Report) -> Report { + let new_err = match diesel_error.current_context() { + DatabaseError::DatabaseConnectionError => StorageError::DatabaseConnectionError, + DatabaseError::NotFound => StorageError::ValueNotFound("Value not found".to_string()), + DatabaseError::UniqueViolation => StorageError::DuplicateValue { + entity: "entity ", + key: None, + }, + DatabaseError::NoFieldsToUpdate => { + StorageError::DatabaseError("No fields to update".to_string()) + } + DatabaseError::QueryGenerationFailed => { + StorageError::DatabaseError("Query generation failed".to_string()) + } + DatabaseError::Others => StorageError::DatabaseError("Others".to_string()), + }; + diesel_error.change_context(new_err) +} diff --git a/crates/router/src/events.rs b/crates/router/src/events.rs index 39a8543a68c4..8f980fee504a 100644 --- a/crates/router/src/events.rs +++ b/crates/router/src/events.rs @@ -1,15 +1,21 @@ -use serde::Serialize; +use data_models::errors::{StorageError, StorageResult}; +use error_stack::ResultExt; +use serde::{Deserialize, Serialize}; +use storage_impl::errors::ApplicationError; + +use crate::{db::KafkaProducer, services::kafka::KafkaSettings}; pub mod api_logs; pub mod event_logger; +pub mod kafka_handler; -pub trait EventHandler: Sync + Send + dyn_clone::DynClone { +pub(super) trait EventHandler: Sync + Send + dyn_clone::DynClone { fn log_event(&self, event: RawEvent); } dyn_clone::clone_trait_object!(EventHandler); -#[derive(Debug)] +#[derive(Debug, Serialize)] pub struct RawEvent { pub event_type: EventType, pub key: String, @@ -24,3 +30,55 @@ pub enum EventType { Refund, ApiLogs, } + +#[derive(Debug, Default, Deserialize, Clone)] +#[serde(tag = "source")] +#[serde(rename_all = "lowercase")] +pub enum EventsConfig { + Kafka { + kafka: KafkaSettings, + }, + #[default] + Logs, +} + +#[derive(Debug, Clone)] +pub enum EventsHandler { + Kafka(KafkaProducer), + Logs(event_logger::EventLogger), +} + +impl Default for EventsHandler { + fn default() -> Self { + Self::Logs(event_logger::EventLogger {}) + } +} + +impl EventsConfig { + pub async fn get_event_handler(&self) -> StorageResult { + Ok(match self { + Self::Kafka { kafka } => EventsHandler::Kafka( + KafkaProducer::create(kafka) + .await + .change_context(StorageError::InitializationError)?, + ), + Self::Logs => EventsHandler::Logs(event_logger::EventLogger::default()), + }) + } + + pub fn validate(&self) -> Result<(), ApplicationError> { + match self { + Self::Kafka { kafka } => kafka.validate(), + Self::Logs => Ok(()), + } + } +} + +impl EventsHandler { + pub fn log_event(&self, event: RawEvent) { + match self { + Self::Kafka(kafka) => kafka.log_event(event), + Self::Logs(logger) => logger.log_event(event), + } + } +} diff --git a/crates/router/src/events/api_logs.rs b/crates/router/src/events/api_logs.rs index 3f598e88394b..bfc10f722c1f 100644 --- a/crates/router/src/events/api_logs.rs +++ b/crates/router/src/events/api_logs.rs @@ -24,6 +24,7 @@ use crate::{ #[derive(Clone, Debug, Eq, PartialEq, Serialize)] #[serde(rename_all = "snake_case")] pub struct ApiEvent { + merchant_id: Option, api_flow: String, created_at_timestamp: i128, request_id: String, @@ -40,11 +41,13 @@ pub struct ApiEvent { #[serde(flatten)] event_type: ApiEventsType, hs_latency: Option, + http_method: Option, } impl ApiEvent { 
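
`EventsConfig` above is an internally tagged serde enum: the `source` field selects the variant, the Kafka settings nest under a `kafka` key, and `Logs` is the default. A sketch of that deserialization, with a cut-down `KafkaCfg` standing in for `KafkaSettings` and JSON standing in for the router's actual config format:

```rust
use serde::Deserialize;

// Cut-down stand-in for KafkaSettings; only brokers is modelled here.
#[derive(Debug, Default, Deserialize)]
struct KafkaCfg {
    brokers: Vec<String>,
}

#[derive(Debug, Default, Deserialize)]
#[serde(tag = "source")]
#[serde(rename_all = "lowercase")]
enum Events {
    Kafka { kafka: KafkaCfg },
    #[default]
    Logs,
}

fn main() {
    // JSON used for illustration; the router reads this from its config files.
    let raw = r#"{ "source": "kafka", "kafka": { "brokers": ["localhost:9092"] } }"#;
    let events: Events = serde_json::from_str(raw).expect("valid events config");
    match events {
        Events::Kafka { kafka } => println!("kafka events -> brokers {:?}", kafka.brokers),
        Events::Logs => println!("log-based events"),
    }
}
```
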
#[allow(clippy::too_many_arguments)] pub fn new( + merchant_id: Option, api_flow: &impl FlowMetric, request_id: &RequestId, latency: u128, @@ -56,8 +59,10 @@ impl ApiEvent { error: Option, event_type: ApiEventsType, http_req: &HttpRequest, + http_method: Option, ) -> Self { Self { + merchant_id, api_flow: api_flow.to_string(), created_at_timestamp: OffsetDateTime::now_utc().unix_timestamp_nanos() / 1_000_000, request_id: request_id.as_hyphenated().to_string(), @@ -78,6 +83,7 @@ impl ApiEvent { url_path: http_req.path().to_string(), event_type, hs_latency, + http_method, } } } diff --git a/crates/router/src/events/event_logger.rs b/crates/router/src/events/event_logger.rs index fda9b1a036ae..1bd75341be4a 100644 --- a/crates/router/src/events/event_logger.rs +++ b/crates/router/src/events/event_logger.rs @@ -7,6 +7,6 @@ pub struct EventLogger {} impl EventHandler for EventLogger { #[track_caller] fn log_event(&self, event: RawEvent) { - logger::info!(event = ?serde_json::to_string(&event.payload).unwrap_or(r#"{ "error": "Serialization failed" }"#.to_string()), event_type =? event.event_type, event_id =? event.key, log_type = "event"); + logger::info!(event = ?event.payload.to_string(), event_type =? event.event_type, event_id =? event.key, log_type = "event"); } } diff --git a/crates/router/src/events/kafka_handler.rs b/crates/router/src/events/kafka_handler.rs new file mode 100644 index 000000000000..d55847e6e8e7 --- /dev/null +++ b/crates/router/src/events/kafka_handler.rs @@ -0,0 +1,29 @@ +use error_stack::{IntoReport, ResultExt}; +use router_env::tracing; + +use super::{EventHandler, RawEvent}; +use crate::{ + db::MQResult, + services::kafka::{KafkaError, KafkaMessage, KafkaProducer}, +}; +impl EventHandler for KafkaProducer { + fn log_event(&self, event: RawEvent) { + let topic = self.get_topic(event.event_type); + if let Err(er) = self.log_kafka_event(topic, &event) { + tracing::error!("Failed to log event to kafka: {:?}", er); + } + } +} + +impl KafkaMessage for RawEvent { + fn key(&self) -> String { + self.key.clone() + } + + fn value(&self) -> MQResult> { + // Add better error logging here + serde_json::to_vec(&self.payload) + .into_report() + .change_context(KafkaError::GenericError) + } +} diff --git a/crates/router/src/lib.rs b/crates/router/src/lib.rs index 2b1f9c692d86..035314f71dfb 100644 --- a/crates/router/src/lib.rs +++ b/crates/router/src/lib.rs @@ -1,8 +1,6 @@ #![forbid(unsafe_code)] #![recursion_limit = "256"] -#[cfg(feature = "olap")] -pub mod analytics; #[cfg(feature = "stripe")] pub mod compatibility; pub mod configs; @@ -17,6 +15,8 @@ pub(crate) mod macros; pub mod routes; pub mod workflows; +#[cfg(feature = "olap")] +pub mod analytics; pub mod events; pub mod middleware; pub mod openapi; @@ -35,10 +35,7 @@ use storage_impl::errors::ApplicationResult; use tokio::sync::{mpsc, oneshot}; pub use self::env::logger; -use crate::{ - configs::settings, - core::errors::{self}, -}; +use crate::{configs::settings, core::errors}; #[cfg(feature = "mimalloc")] #[global_allocator] diff --git a/crates/router/src/routes.rs b/crates/router/src/routes.rs index 37cc1339e1a1..b19ef5d7016b 100644 --- a/crates/router/src/routes.rs +++ b/crates/router/src/routes.rs @@ -27,8 +27,12 @@ pub mod refunds; pub mod routing; #[cfg(feature = "olap")] pub mod user; +#[cfg(feature = "olap")] +pub mod user_role; #[cfg(all(feature = "olap", feature = "kms"))] pub mod verification; +#[cfg(feature = "olap")] +pub mod verify_connector; pub mod webhooks; pub mod locker_migration; diff --git 
a/crates/router/src/routes/app.rs b/crates/router/src/routes/app.rs index ae0e0f04f598..a145f3e7e5d7 100644 --- a/crates/router/src/routes/app.rs +++ b/crates/router/src/routes/app.rs @@ -1,10 +1,14 @@ use std::sync::Arc; use actix_web::{web, Scope}; +#[cfg(all(feature = "kms", feature = "olap"))] +use analytics::AnalyticsConfig; #[cfg(feature = "email")] -use external_services::email::{AwsSes, EmailClient}; +use external_services::email::{ses::AwsSes, EmailService}; #[cfg(feature = "kms")] use external_services::kms::{self, decrypt::KmsDecrypt}; +#[cfg(all(feature = "olap", feature = "kms"))] +use masking::PeekInterface; use router_env::tracing_actix_web::RequestId; use scheduler::SchedulerInterface; use storage_impl::MockDb; @@ -23,17 +27,19 @@ use super::verification::{apple_pay_merchant_registration, retrieve_apple_pay_ve #[cfg(feature = "olap")] use super::{ admin::*, api_keys::*, disputes::*, files::*, gsm::*, locker_migration, payment_link::*, - user::*, + user::*, user_role::*, }; use super::{cache::*, health::*}; #[cfg(any(feature = "olap", feature = "oltp"))] use super::{configs::*, customers::*, mandates::*, payments::*, refunds::*}; #[cfg(feature = "oltp")] use super::{ephemeral_key::*, payment_methods::*, webhooks::*}; +#[cfg(feature = "olap")] +use crate::routes::verify_connector::payment_connector_verify; pub use crate::{ configs::settings, db::{StorageImpl, StorageInterface}, - events::{event_logger::EventLogger, EventHandler}, + events::EventsHandler, routes::cards_info::card_iin_info, services::get_store, }; @@ -43,9 +49,9 @@ pub struct AppState { pub flow_name: String, pub store: Box, pub conf: Arc, - pub event_handler: Box, + pub event_handler: EventsHandler, #[cfg(feature = "email")] - pub email_client: Arc, + pub email_client: Arc, #[cfg(feature = "kms")] pub kms_secrets: Arc, pub api_client: Box, @@ -62,9 +68,9 @@ impl scheduler::SchedulerAppState for AppState { pub trait AppStateInfo { fn conf(&self) -> settings::Settings; fn store(&self) -> Box; - fn event_handler(&self) -> Box; + fn event_handler(&self) -> EventsHandler; #[cfg(feature = "email")] - fn email_client(&self) -> Arc; + fn email_client(&self) -> Arc; fn add_request_id(&mut self, request_id: RequestId); fn add_merchant_id(&mut self, merchant_id: Option); fn add_flow_name(&mut self, flow_name: String); @@ -79,11 +85,11 @@ impl AppStateInfo for AppState { self.store.to_owned() } #[cfg(feature = "email")] - fn email_client(&self) -> Arc { + fn email_client(&self) -> Arc { self.email_client.to_owned() } - fn event_handler(&self) -> Box { - self.event_handler.to_owned() + fn event_handler(&self) -> EventsHandler { + self.event_handler.clone() } fn add_request_id(&mut self, request_id: RequestId) { self.api_client.add_request_id(request_id); @@ -107,12 +113,22 @@ impl AsRef for AppState { } } +#[cfg(feature = "email")] +pub async fn create_email_client(settings: &settings::Settings) -> impl EmailService { + match settings.email.active_email_client { + external_services::email::AvailableEmailClients::SES => { + AwsSes::create(&settings.email, settings.proxy.https_url.to_owned()).await + } + } +} + impl AppState { /// # Panics /// /// Panics if Store can't be created or JWE decryption fails pub async fn with_storage( - conf: settings::Settings, + #[cfg_attr(not(all(feature = "olap", feature = "kms")), allow(unused_mut))] + mut conf: settings::Settings, storage_impl: StorageImpl, shut_down_signal: oneshot::Sender<()>, api_client: Box, @@ -121,13 +137,31 @@ impl AppState { #[cfg(feature = "kms")] let kms_client = 
kms::get_kms_client(&conf.kms).await; let testable = storage_impl == StorageImpl::PostgresqlTest; + #[allow(clippy::expect_used)] + let event_handler = conf + .events + .get_event_handler() + .await + .expect("Failed to create event handler"); let store: Box = match storage_impl { - StorageImpl::Postgresql | StorageImpl::PostgresqlTest => Box::new( - #[allow(clippy::expect_used)] - get_store(&conf, shut_down_signal, testable) - .await - .expect("Failed to create store"), - ), + StorageImpl::Postgresql | StorageImpl::PostgresqlTest => match &event_handler { + EventsHandler::Kafka(kafka_client) => Box::new( + crate::db::KafkaStore::new( + #[allow(clippy::expect_used)] + get_store(&conf.clone(), shut_down_signal, testable) + .await + .expect("Failed to create store"), + kafka_client.clone(), + ) + .await, + ), + EventsHandler::Logs(_) => Box::new( + #[allow(clippy::expect_used)] + get_store(&conf, shut_down_signal, testable) + .await + .expect("Failed to create store"), + ), + }, #[allow(clippy::expect_used)] StorageImpl::Mock => Box::new( MockDb::new(&conf.redis) @@ -136,13 +170,23 @@ impl AppState { ), }; + #[cfg(all(feature = "kms", feature = "olap"))] + #[allow(clippy::expect_used)] + match conf.analytics { + AnalyticsConfig::Clickhouse { .. } => {} + AnalyticsConfig::Sqlx { ref mut sqlx } + | AnalyticsConfig::CombinedCkh { ref mut sqlx, .. } + | AnalyticsConfig::CombinedSqlx { ref mut sqlx, .. } => { + sqlx.password = kms_client + .decrypt(&sqlx.password.peek()) + .await + .expect("Failed to decrypt password") + .into(); + } + }; + #[cfg(feature = "olap")] - let pool = crate::analytics::AnalyticsProvider::from_conf( - &conf.analytics, - #[cfg(feature = "kms")] - kms_client, - ) - .await; + let pool = crate::analytics::AnalyticsProvider::from_conf(&conf.analytics).await; #[cfg(feature = "kms")] #[allow(clippy::expect_used)] @@ -154,7 +198,8 @@ impl AppState { .expect("Failed while performing KMS decryption"); #[cfg(feature = "email")] - let email_client = Arc::new(AwsSes::new(&conf.email).await); + let email_client = Arc::new(create_email_client(&conf).await); + Self { flow_name: String::from("default"), store, @@ -164,7 +209,7 @@ impl AppState { #[cfg(feature = "kms")] kms_secrets: Arc::new(kms_secrets), api_client, - event_handler: Box::::default(), + event_handler, #[cfg(feature = "olap")] pool, } @@ -525,6 +570,10 @@ impl MerchantConnectorAccount { use super::admin::*; route = route + .service( + web::resource("/connectors/verify") + .route(web::post().to(payment_connector_verify)), + ) .service( web::resource("/{merchant_id}/connectors") .route(web::post().to(payment_connector_create)) @@ -771,13 +820,42 @@ pub struct User; #[cfg(feature = "olap")] impl User { pub fn server(state: AppState) -> Scope { - web::scope("/user") - .app_data(web::Data::new(state)) + let mut route = web::scope("/user").app_data(web::Data::new(state)); + + route = route .service(web::resource("/signin").route(web::post().to(user_connect_account))) .service(web::resource("/signup").route(web::post().to(user_connect_account))) .service(web::resource("/v2/signin").route(web::post().to(user_connect_account))) .service(web::resource("/v2/signup").route(web::post().to(user_connect_account))) .service(web::resource("/change_password").route(web::post().to(change_password))) + .service( + web::resource("/data/merchant") + .route(web::post().to(set_merchant_scoped_dashboard_metadata)), + ) + .service(web::resource("/data").route(web::get().to(get_multiple_dashboard_metadata))) + 
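
In `with_storage` above, the event handler is constructed before the store, and the Diesel-backed store is wrapped in `KafkaStore` only when the handler is `EventsHandler::Kafka`. A sketch of that decorator-style selection, with stand-in types for the real `StorageInterface` implementations:

```rust
// Stand-in types for the real StorageInterface, Store and KafkaStore.
trait StorageInterface {
    fn name(&self) -> &'static str;
}

struct DieselStore;
impl StorageInterface for DieselStore {
    fn name(&self) -> &'static str { "diesel" }
}

struct KafkaStore { _inner: DieselStore }
impl StorageInterface for KafkaStore {
    fn name(&self) -> &'static str { "kafka+diesel" }
}

enum EventsHandler { Kafka, Logs }

fn build_store(events: &EventsHandler) -> Box<dyn StorageInterface> {
    match events {
        // Kafka events configured: decorate the store so writes also emit events.
        EventsHandler::Kafka => Box::new(KafkaStore { _inner: DieselStore }),
        // Log-based events: the plain Diesel store is enough.
        EventsHandler::Logs => Box::new(DieselStore),
    }
}

fn main() {
    assert_eq!(build_store(&EventsHandler::Kafka).name(), "kafka+diesel");
    assert_eq!(build_store(&EventsHandler::Logs).name(), "diesel");
}
```
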
.service(web::resource("/internal_signup").route(web::post().to(internal_user_signup))) + .service(web::resource("/switch_merchant").route(web::post().to(switch_merchant_id))) + .service( + web::resource("/create_merchant") + .route(web::post().to(user_merchant_account_create)), + ) + .service(web::resource("/switch/list").route(web::get().to(list_merchant_ids_for_user))) + .service(web::resource("/user/list").route(web::get().to(get_user_details))) + // User Role APIs + .service(web::resource("/permission_info").route(web::get().to(get_authorization_info))) + .service(web::resource("/user/update_role").route(web::post().to(update_user_role))) + .service(web::resource("/role/list").route(web::get().to(list_roles))) + .service(web::resource("/role/{role_id}").route(web::get().to(get_role))); + + #[cfg(feature = "dummy_connector")] + { + route = route.service( + web::resource("/sample_data") + .route(web::post().to(generate_sample_data)) + .route(web::delete().to(delete_sample_data)), + ) + } + route } } diff --git a/crates/router/src/routes/lock_utils.rs b/crates/router/src/routes/lock_utils.rs index 5c2ad123749c..6aa2bbad0b15 100644 --- a/crates/router/src/routes/lock_utils.rs +++ b/crates/router/src/routes/lock_utils.rs @@ -27,6 +27,7 @@ pub enum ApiIdentifier { RustLockerMigration, Gsm, User, + UserRole, } impl From for ApiIdentifier { @@ -147,7 +148,22 @@ impl From for ApiIdentifier { | Flow::GsmRuleUpdate | Flow::GsmRuleDelete => Self::Gsm, - Flow::UserConnectAccount | Flow::ChangePassword => Self::User, + Flow::UserConnectAccount + | Flow::ChangePassword + | Flow::SetDashboardMetadata + | Flow::GetMutltipleDashboardMetadata + | Flow::VerifyPaymentConnector + | Flow::InternalUserSignup + | Flow::SwitchMerchant + | Flow::UserMerchantAccountCreate + | Flow::GenerateSampleData + | Flow::DeleteSampleData + | Flow::UserMerchantAccountList + | Flow::GetUserDetails => Self::User, + + Flow::ListRoles | Flow::GetRole | Flow::UpdateUserRole | Flow::GetAuthorizationInfo => { + Self::UserRole + } } } } diff --git a/crates/router/src/routes/metrics.rs b/crates/router/src/routes/metrics.rs index a8e6f9d2a892..192df1a09298 100644 --- a/crates/router/src/routes/metrics.rs +++ b/crates/router/src/routes/metrics.rs @@ -85,6 +85,7 @@ counter_metric!(CONNECTOR_HTTP_STATUS_CODE_5XX_COUNT, GLOBAL_METER); // Service Level counter_metric!(CARD_LOCKER_FAILURES, GLOBAL_METER); +counter_metric!(CARD_LOCKER_SUCCESSFUL_RESPONSE, GLOBAL_METER); counter_metric!(TEMP_LOCKER_FAILURES, GLOBAL_METER); histogram_metric!(CARD_ADD_TIME, GLOBAL_METER); histogram_metric!(CARD_GET_TIME, GLOBAL_METER); diff --git a/crates/router/src/routes/user.rs b/crates/router/src/routes/user.rs index 7d3d183eda76..97bd7054da9e 100644 --- a/crates/router/src/routes/user.rs +++ b/crates/router/src/routes/user.rs @@ -1,14 +1,22 @@ use actix_web::{web, HttpRequest, HttpResponse}; -use api_models::user as user_api; +#[cfg(feature = "dummy_connector")] +use api_models::user::sample_data::SampleDataRequest; +use api_models::{ + errors::types::ApiErrorResponse, + user::{self as user_api}, +}; +use common_utils::errors::ReportSwitchExt; use router_env::Flow; use super::AppState; use crate::{ - core::{api_locking, user}, + core::{api_locking, user as user_core}, services::{ api, authentication::{self as auth}, + authorization::permissions::Permission, }, + utils::user::dashboard_metadata::{parse_string_to_enums, set_ip_address_if_required}, }; pub async fn user_connect_account( @@ -23,7 +31,7 @@ pub async fn user_connect_account( state, &http_req, 
req_payload.clone(), - |state, _, req_body| user::connect_account(state, req_body), + |state, _, req_body| user_core::connect_account(state, req_body), &auth::NoAuth, api_locking::LockAction::NotApplicable, )) @@ -41,9 +49,189 @@ pub async fn change_password( state.clone(), &http_req, json_payload.into_inner(), - |state, user, req| user::change_password(state, req, user), + |state, user, req| user_core::change_password(state, req, user), + &auth::DashboardNoPermissionAuth, + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn set_merchant_scoped_dashboard_metadata( + state: web::Data, + req: HttpRequest, + json_payload: web::Json, +) -> HttpResponse { + let flow = Flow::SetDashboardMetadata; + let mut payload = json_payload.into_inner(); + + if let Err(e) = common_utils::errors::ReportSwitchExt::<(), ApiErrorResponse>::switch( + set_ip_address_if_required(&mut payload, req.headers()), + ) { + return api::log_and_return_error_response(e); + } + + Box::pin(api::server_wrap( + flow, + state, + &req, + payload, + user_core::dashboard_metadata::set_metadata, + &auth::JWTAuth(Permission::MerchantAccountWrite), + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn get_multiple_dashboard_metadata( + state: web::Data, + req: HttpRequest, + query: web::Query, +) -> HttpResponse { + let flow = Flow::GetMutltipleDashboardMetadata; + let payload = match ReportSwitchExt::<_, ApiErrorResponse>::switch(parse_string_to_enums( + query.into_inner().keys, + )) { + Ok(payload) => payload, + Err(e) => { + return api::log_and_return_error_response(e); + } + }; + Box::pin(api::server_wrap( + flow, + state, + &req, + payload, + user_core::dashboard_metadata::get_multiple_metadata, + &auth::DashboardNoPermissionAuth, + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn internal_user_signup( + state: web::Data, + http_req: HttpRequest, + json_payload: web::Json, +) -> HttpResponse { + let flow = Flow::InternalUserSignup; + Box::pin(api::server_wrap( + flow, + state.clone(), + &http_req, + json_payload.into_inner(), + |state, _, req| user_core::create_internal_user(state, req), + &auth::AdminApiAuth, + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn switch_merchant_id( + state: web::Data, + http_req: HttpRequest, + json_payload: web::Json, +) -> HttpResponse { + let flow = Flow::SwitchMerchant; + Box::pin(api::server_wrap( + flow, + state.clone(), + &http_req, + json_payload.into_inner(), + |state, user, req| user_core::switch_merchant_id(state, req, user), + &auth::DashboardNoPermissionAuth, + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn user_merchant_account_create( + state: web::Data, + req: HttpRequest, + json_payload: web::Json, +) -> HttpResponse { + let flow = Flow::UserMerchantAccountCreate; + Box::pin(api::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, auth: auth::UserFromToken, json_payload| { + user_core::create_merchant_account(state, auth, json_payload) + }, + &auth::JWTAuth(Permission::MerchantAccountCreate), + api_locking::LockAction::NotApplicable, + )) + .await +} + +#[cfg(feature = "dummy_connector")] +pub async fn generate_sample_data( + state: web::Data, + http_req: HttpRequest, + payload: web::Json, +) -> impl actix_web::Responder { + use crate::core::user::sample_data; + + let flow = Flow::GenerateSampleData; + Box::pin(api::server_wrap( + flow, + state, + &http_req, + payload.into_inner(), + sample_data::generate_sample_data_for_user, + 
&auth::JWTAuth(Permission::MerchantAccountWrite), + api_locking::LockAction::NotApplicable, + )) + .await +} +#[cfg(feature = "dummy_connector")] +pub async fn delete_sample_data( + state: web::Data, + http_req: HttpRequest, + payload: web::Json, +) -> impl actix_web::Responder { + use crate::core::user::sample_data; + + let flow = Flow::DeleteSampleData; + Box::pin(api::server_wrap( + flow, + state, + &http_req, + payload.into_inner(), + sample_data::delete_sample_data_for_user, + &auth::JWTAuth(Permission::MerchantAccountWrite), + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn list_merchant_ids_for_user( + state: web::Data, + req: HttpRequest, +) -> HttpResponse { + let flow = Flow::UserMerchantAccountList; + Box::pin(api::server_wrap( + flow, + state, + &req, + (), + |state, user, _| user_core::list_merchant_ids_for_user(state, user), &auth::DashboardNoPermissionAuth, api_locking::LockAction::NotApplicable, )) .await } + +pub async fn get_user_details(state: web::Data, req: HttpRequest) -> HttpResponse { + let flow = Flow::GetUserDetails; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + (), + |state, user, _| user_core::get_users_for_merchant_account(state, user), + &auth::JWTAuth(Permission::UsersRead), + api_locking::LockAction::NotApplicable, + )) + .await +} diff --git a/crates/router/src/routes/user_role.rs b/crates/router/src/routes/user_role.rs new file mode 100644 index 000000000000..c96e099ab163 --- /dev/null +++ b/crates/router/src/routes/user_role.rs @@ -0,0 +1,84 @@ +use actix_web::{web, HttpRequest, HttpResponse}; +use api_models::user_role as user_role_api; +use router_env::Flow; + +use super::AppState; +use crate::{ + core::{api_locking, user_role as user_role_core}, + services::{ + api, + authentication::{self as auth}, + authorization::permissions::Permission, + }, +}; + +pub async fn get_authorization_info( + state: web::Data, + http_req: HttpRequest, +) -> HttpResponse { + let flow = Flow::GetAuthorizationInfo; + Box::pin(api::server_wrap( + flow, + state.clone(), + &http_req, + (), + |state, _: (), _| user_role_core::get_authorization_info(state), + &auth::JWTAuth(Permission::UsersRead), + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn list_roles(state: web::Data, req: HttpRequest) -> HttpResponse { + let flow = Flow::ListRoles; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + (), + |state, _: (), _| user_role_core::list_roles(state), + &auth::JWTAuth(Permission::UsersRead), + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn get_role( + state: web::Data, + req: HttpRequest, + path: web::Path, +) -> HttpResponse { + let flow = Flow::GetRole; + let request_payload = user_role_api::GetRoleRequest { + role_id: path.into_inner(), + }; + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + request_payload, + |state, _: (), req| user_role_core::get_role(state, req), + &auth::JWTAuth(Permission::UsersRead), + api_locking::LockAction::NotApplicable, + )) + .await +} + +pub async fn update_user_role( + state: web::Data, + req: HttpRequest, + json_payload: web::Json, +) -> HttpResponse { + let flow = Flow::UpdateUserRole; + let payload = json_payload.into_inner(); + Box::pin(api::server_wrap( + flow, + state.clone(), + &req, + payload, + user_role_core::update_user_role, + &auth::JWTAuth(Permission::UsersWrite), + api_locking::LockAction::NotApplicable, + )) + .await +} diff --git a/crates/router/src/routes/verify_connector.rs 
b/crates/router/src/routes/verify_connector.rs new file mode 100644 index 000000000000..bfb1b781ada4 --- /dev/null +++ b/crates/router/src/routes/verify_connector.rs @@ -0,0 +1,28 @@ +use actix_web::{web, HttpRequest, HttpResponse}; +use api_models::verify_connector::VerifyConnectorRequest; +use router_env::{instrument, tracing, Flow}; + +use super::AppState; +use crate::{ + core::{api_locking, verify_connector}, + services::{self, authentication as auth, authorization::permissions::Permission}, +}; + +#[instrument(skip_all, fields(flow = ?Flow::VerifyPaymentConnector))] +pub async fn payment_connector_verify( + state: web::Data, + req: HttpRequest, + json_payload: web::Json, +) -> HttpResponse { + let flow = Flow::VerifyPaymentConnector; + Box::pin(services::server_wrap( + flow, + state, + &req, + json_payload.into_inner(), + |state, _: (), req| verify_connector::verify_connector_credentials(state, req), + &auth::JWTAuth(Permission::MerchantConnectorAccountWrite), + api_locking::LockAction::NotApplicable, + )) + .await +} diff --git a/crates/router/src/services.rs b/crates/router/src/services.rs index 2d5552b59d17..e46612b95dfc 100644 --- a/crates/router/src/services.rs +++ b/crates/router/src/services.rs @@ -4,8 +4,12 @@ pub mod authorization; pub mod encryption; #[cfg(feature = "olap")] pub mod jwt; +pub mod kafka; pub mod logger; +#[cfg(feature = "email")] +pub mod email; + #[cfg(feature = "kms")] use data_models::errors::StorageError; use data_models::errors::StorageResult; diff --git a/crates/router/src/services/api.rs b/crates/router/src/services/api.rs index 5481d5c5cf9d..1ff46474db59 100644 --- a/crates/router/src/services/api.rs +++ b/crates/router/src/services/api.rs @@ -873,6 +873,7 @@ where }; let api_event = ApiEvent::new( + Some(merchant_id.clone()), flow, &request_id, request_duration, @@ -884,6 +885,7 @@ where error, event_type.unwrap_or(ApiEventsType::Miscellaneous), request, + Some(request.method().to_string()), ); match api_event.clone().try_into() { Ok(event) => { diff --git a/crates/router/src/services/authentication.rs b/crates/router/src/services/authentication.rs index b01e3762bfab..8a0cd7c729e9 100644 --- a/crates/router/src/services/authentication.rs +++ b/crates/router/src/services/authentication.rs @@ -444,6 +444,9 @@ where ) -> RouterResult<(UserFromToken, AuthenticationType)> { let payload = parse_jwt_payload::(request_headers, state).await?; + let permissions = authorization::get_permissions(&payload.role_id)?; + authorization::check_authorization(&self.0, permissions)?; + Ok(( UserFromToken { user_id: payload.user_id.clone(), diff --git a/crates/router/src/services/authorization/predefined_permissions.rs b/crates/router/src/services/authorization/predefined_permissions.rs index 89fa2c8f739c..a9f2b864d0ad 100644 --- a/crates/router/src/services/authorization/predefined_permissions.rs +++ b/crates/router/src/services/authorization/predefined_permissions.rs @@ -28,7 +28,67 @@ impl RoleInfo { pub static PREDEFINED_PERMISSIONS: Lazy> = Lazy::new(|| { let mut roles = HashMap::new(); roles.insert( - consts::ROLE_ID_ORGANIZATION_ADMIN, + consts::user_role::ROLE_ID_INTERNAL_ADMIN, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::PaymentWrite, + Permission::RefundRead, + Permission::RefundWrite, + Permission::ApiKeyRead, + Permission::ApiKeyWrite, + Permission::MerchantAccountRead, + Permission::MerchantAccountWrite, + Permission::MerchantConnectorAccountRead, + Permission::MerchantConnectorAccountWrite, + Permission::RoutingRead, + 
Permission::RoutingWrite, + Permission::ForexRead, + Permission::ThreeDsDecisionManagerWrite, + Permission::ThreeDsDecisionManagerRead, + Permission::SurchargeDecisionManagerWrite, + Permission::SurchargeDecisionManagerRead, + Permission::DisputeRead, + Permission::DisputeWrite, + Permission::MandateRead, + Permission::MandateWrite, + Permission::FileRead, + Permission::FileWrite, + Permission::Analytics, + Permission::UsersRead, + Permission::UsersWrite, + Permission::MerchantAccountCreate, + ], + name: None, + is_invitable: false, + }, + ); + roles.insert( + consts::user_role::ROLE_ID_INTERNAL_VIEW_ONLY_USER, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::RefundRead, + Permission::ApiKeyRead, + Permission::MerchantAccountRead, + Permission::MerchantConnectorAccountRead, + Permission::RoutingRead, + Permission::ForexRead, + Permission::ThreeDsDecisionManagerRead, + Permission::SurchargeDecisionManagerRead, + Permission::Analytics, + Permission::DisputeRead, + Permission::MandateRead, + Permission::FileRead, + Permission::UsersRead, + ], + name: None, + is_invitable: false, + }, + ); + + roles.insert( + consts::user_role::ROLE_ID_ORGANIZATION_ADMIN, RoleInfo { permissions: vec![ Permission::PaymentRead, @@ -63,6 +123,164 @@ pub static PREDEFINED_PERMISSIONS: Lazy> = Lazy: is_invitable: false, }, ); + + // MERCHANT ROLES + roles.insert( + consts::user_role::ROLE_ID_MERCHANT_ADMIN, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::PaymentWrite, + Permission::RefundRead, + Permission::RefundWrite, + Permission::ApiKeyRead, + Permission::ApiKeyWrite, + Permission::MerchantAccountRead, + Permission::MerchantAccountWrite, + Permission::MerchantConnectorAccountRead, + Permission::ForexRead, + Permission::MerchantConnectorAccountWrite, + Permission::RoutingRead, + Permission::RoutingWrite, + Permission::ThreeDsDecisionManagerWrite, + Permission::ThreeDsDecisionManagerRead, + Permission::SurchargeDecisionManagerWrite, + Permission::SurchargeDecisionManagerRead, + Permission::DisputeRead, + Permission::DisputeWrite, + Permission::MandateRead, + Permission::MandateWrite, + Permission::FileRead, + Permission::FileWrite, + Permission::Analytics, + Permission::UsersRead, + Permission::UsersWrite, + ], + name: Some("Admin"), + is_invitable: true, + }, + ); + roles.insert( + consts::user_role::ROLE_ID_MERCHANT_VIEW_ONLY, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::RefundRead, + Permission::ApiKeyRead, + Permission::MerchantAccountRead, + Permission::ForexRead, + Permission::MerchantConnectorAccountRead, + Permission::RoutingRead, + Permission::ThreeDsDecisionManagerRead, + Permission::SurchargeDecisionManagerRead, + Permission::DisputeRead, + Permission::MandateRead, + Permission::FileRead, + Permission::Analytics, + Permission::UsersRead, + ], + name: Some("View Only"), + is_invitable: true, + }, + ); + roles.insert( + consts::user_role::ROLE_ID_MERCHANT_IAM_ADMIN, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::RefundRead, + Permission::ApiKeyRead, + Permission::MerchantAccountRead, + Permission::ForexRead, + Permission::MerchantConnectorAccountRead, + Permission::RoutingRead, + Permission::ThreeDsDecisionManagerRead, + Permission::SurchargeDecisionManagerRead, + Permission::DisputeRead, + Permission::MandateRead, + Permission::FileRead, + Permission::Analytics, + Permission::UsersRead, + Permission::UsersWrite, + ], + name: Some("IAM"), + is_invitable: true, + }, + ); + roles.insert( + 
consts::user_role::ROLE_ID_MERCHANT_DEVELOPER, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::RefundRead, + Permission::ApiKeyRead, + Permission::ApiKeyWrite, + Permission::MerchantAccountRead, + Permission::ForexRead, + Permission::MerchantConnectorAccountRead, + Permission::RoutingRead, + Permission::ThreeDsDecisionManagerRead, + Permission::SurchargeDecisionManagerRead, + Permission::DisputeRead, + Permission::MandateRead, + Permission::FileRead, + Permission::Analytics, + Permission::UsersRead, + ], + name: Some("Developer"), + is_invitable: true, + }, + ); + roles.insert( + consts::user_role::ROLE_ID_MERCHANT_OPERATOR, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::PaymentWrite, + Permission::RefundRead, + Permission::RefundWrite, + Permission::ApiKeyRead, + Permission::MerchantAccountRead, + Permission::ForexRead, + Permission::MerchantConnectorAccountRead, + Permission::MerchantConnectorAccountWrite, + Permission::RoutingRead, + Permission::RoutingWrite, + Permission::ThreeDsDecisionManagerRead, + Permission::ThreeDsDecisionManagerWrite, + Permission::SurchargeDecisionManagerRead, + Permission::SurchargeDecisionManagerWrite, + Permission::DisputeRead, + Permission::MandateRead, + Permission::FileRead, + Permission::Analytics, + Permission::UsersRead, + ], + name: Some("Operator"), + is_invitable: true, + }, + ); + roles.insert( + consts::user_role::ROLE_ID_MERCHANT_CUSTOMER_SUPPORT, + RoleInfo { + permissions: vec![ + Permission::PaymentRead, + Permission::RefundRead, + Permission::RefundWrite, + Permission::ForexRead, + Permission::DisputeRead, + Permission::DisputeWrite, + Permission::MerchantAccountRead, + Permission::MerchantConnectorAccountRead, + Permission::MandateRead, + Permission::FileRead, + Permission::FileWrite, + Permission::Analytics, + ], + name: Some("Customer Support"), + is_invitable: true, + }, + ); roles }); diff --git a/crates/router/src/services/email.rs b/crates/router/src/services/email.rs new file mode 100644 index 000000000000..cd408564ea08 --- /dev/null +++ b/crates/router/src/services/email.rs @@ -0,0 +1 @@ +pub mod types; diff --git a/crates/router/src/services/email/assets/invite.html b/crates/router/src/services/email/assets/invite.html new file mode 100644 index 000000000000..307ec6cead85 --- /dev/null +++ b/crates/router/src/services/email/assets/invite.html @@ -0,0 +1,243 @@ + +Welcome to HyperSwitch! + + + +
+ [invite.html body: table-based HTML email layout; the markup was lost in extraction and only the copy is recoverable. Header cell: "Welcome to HyperSwitch!". Greeting: "Hi {username}". Body: "You have received this email because your administrator has invited you as a new user on Hyperswitch. To get started, click on the button below." Button: "Click here to Join" (href = {link}). Footer note: "If the link has already expired, you can request a new link from your administrator or reach out to your internal support for more assistance." Sign-off: "Thanks, Team Hyperswitch".]
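
The `{username}` and `{link}` tokens in the template above are ordinary Rust named format arguments: `services/email/types.rs` later in this diff renders each asset with `format!` over `include_str!`. A sketch with an inline literal and illustrative values (`format!` requires a literal format string, which is exactly why the real code passes `include_str!("assets/invite.html")` directly to `format!`):

```rust
fn main() {
    // The literal below stands in for include_str!("assets/invite.html");
    // both argument values are illustrative.
    let body = format!(
        "Hi {username}, click here to join: {link}",
        username = "jane.doe",
        link = "https://app.example.com/user/set_password/?token=abc123",
    );
    assert!(body.contains("jane.doe"));
    println!("{body}");
}
```
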
+ diff --git a/crates/router/src/services/email/assets/magic_link.html b/crates/router/src/services/email/assets/magic_link.html new file mode 100644 index 000000000000..643b6e230633 --- /dev/null +++ b/crates/router/src/services/email/assets/magic_link.html @@ -0,0 +1,256 @@ + +Login to Hyperswitch + + +
+ [magic_link.html body: table-based HTML email layout; markup lost in extraction. Header cell: "Welcome to Hyperswitch!". Greeting: "Dear {user_name},". Body: "We are thrilled to welcome you into our community! Simply click on the link below, and you'll be granted instant access to your Hyperswitch account. Note that this link expires in 24 hours and can only be used once." Button: "Unlock Hyperswitch" (href = {link}). Sign-off: "Thanks, Team Hyperswitch".]
+ diff --git a/crates/router/src/services/email/assets/recon_activated.html b/crates/router/src/services/email/assets/recon_activated.html new file mode 100644 index 000000000000..7feffacb09df --- /dev/null +++ b/crates/router/src/services/email/assets/recon_activated.html @@ -0,0 +1,309 @@ + +Access Granted to HyperSwitch Recon Dashboard! + + + + +
+ [recon_activated.html body: table-based HTML email layout; markup lost in extraction. Header cell: "Access Granted to HyperSwitch Recon Dashboard!". Greeting: "Dear {username}". Body: "We are pleased to inform you that your Reconciliation access request has been approved. As a result, you now have authorized access to the Recon dashboard, allowing you to test its functionality and experience its benefits firsthand. To access the Recon dashboard, please follow these steps: 1. Visit our website at Hyperswitch Dashboard. 2. Click on the 'Login' button. 3. Enter your login credentials to log in. 4. Once logged in, you will have full access to the Recon dashboard, where you can explore its comprehensive features." Closing: "Should you have any inquiries or require any form of assistance, please do not hesitate to reach out to our team on Slack, and we will be more than willing to assist you promptly. Wishing you a seamless and successful experience as you explore the capabilities of Hyperswitch." Sign-off: "Thanks, Team Hyperswitch".]
+ \ No newline at end of file diff --git a/crates/router/src/services/email/assets/reset.html b/crates/router/src/services/email/assets/reset.html new file mode 100644 index 000000000000..98ddf8a7bd16 --- /dev/null +++ b/crates/router/src/services/email/assets/reset.html @@ -0,0 +1,229 @@ + +Hyperswitch Merchant + + + +
+ [reset.html body: table-based HTML email layout; markup lost in extraction. Header cell: "Reset Your Password". Greeting: "Hey {username}". Body: "We have received a request to reset your password associated with username: {username}. Click on the below button to reset your password." Button: "Reset Password" (href = {link}). Sign-off: "Thanks, Team Hyperswitch".]
+ diff --git a/crates/router/src/services/email/assets/verify.html b/crates/router/src/services/email/assets/verify.html new file mode 100644 index 000000000000..47d0e3b5c6d5 --- /dev/null +++ b/crates/router/src/services/email/assets/verify.html @@ -0,0 +1,253 @@ + +Hyperswitch Merchant + + +
+ [verify.html body: table-based HTML email layout; markup lost in extraction. Header cell: "Thanks for signing up! We need a confirmation of your email address to complete your registration." Body: "Click below to confirm your email address." Button: "Verify Email Now" (href = {link}). Sign-off: "Thanks, Team Hyperswitch".]
+ diff --git a/crates/router/src/services/email/types.rs b/crates/router/src/services/email/types.rs new file mode 100644 index 000000000000..a4a4681c6001 --- /dev/null +++ b/crates/router/src/services/email/types.rs @@ -0,0 +1,195 @@ +use common_utils::errors::CustomResult; +use error_stack::ResultExt; +use external_services::email::{EmailContents, EmailData, EmailError}; +use masking::ExposeInterface; + +use crate::{configs, consts}; +#[cfg(feature = "olap")] +use crate::{core::errors::UserErrors, services::jwt, types::domain}; + +pub enum EmailBody { + Verify { link: String }, + Reset { link: String, user_name: String }, + MagicLink { link: String, user_name: String }, + InviteUser { link: String, user_name: String }, +} + +pub mod html { + use crate::services::email::types::EmailBody; + + pub fn get_html_body(email_body: EmailBody) -> String { + match email_body { + EmailBody::Verify { link } => { + format!(include_str!("assets/verify.html"), link = link) + } + EmailBody::Reset { link, user_name } => { + format!( + include_str!("assets/reset.html"), + link = link, + username = user_name + ) + } + EmailBody::MagicLink { link, user_name } => { + format!( + include_str!("assets/magic_link.html"), + user_name = user_name, + link = link + ) + } + EmailBody::InviteUser { link, user_name } => { + format!( + include_str!("assets/invite.html"), + username = user_name, + link = link + ) + } + } + } +} + +#[derive(serde::Serialize, serde::Deserialize)] +pub struct EmailToken { + email: String, + expiration: u64, +} + +impl EmailToken { + pub async fn new_token( + email: domain::UserEmail, + settings: &configs::settings::Settings, + ) -> CustomResult { + let expiration_duration = std::time::Duration::from_secs(consts::EMAIL_TOKEN_TIME_IN_SECS); + let expiration = jwt::generate_exp(expiration_duration)?.as_secs(); + let token_payload = Self { + email: email.get_secret().expose(), + expiration, + }; + jwt::generate_jwt(&token_payload, settings).await + } +} + +pub fn get_link_with_token( + base_url: impl std::fmt::Display, + token: impl std::fmt::Display, + action: impl std::fmt::Display, +) -> String { + format!("{base_url}/user/{action}/?token={token}") +} + +pub struct VerifyEmail { + pub recipient_email: domain::UserEmail, + pub settings: std::sync::Arc, + pub subject: &'static str, +} + +/// Currently only HTML is supported +#[async_trait::async_trait] +impl EmailData for VerifyEmail { + async fn get_email_data(&self) -> CustomResult { + let token = EmailToken::new_token(self.recipient_email.clone(), &self.settings) + .await + .change_context(EmailError::TokenGenerationFailure)?; + + let verify_email_link = + get_link_with_token(&self.settings.server.base_url, token, "verify_email"); + + let body = html::get_html_body(EmailBody::Verify { + link: verify_email_link, + }); + + Ok(EmailContents { + subject: self.subject.to_string(), + body: external_services::email::IntermediateString::new(body), + recipient: self.recipient_email.clone().into_inner(), + }) + } +} + +pub struct ResetPassword { + pub recipient_email: domain::UserEmail, + pub user_name: domain::UserName, + pub settings: std::sync::Arc, + pub subject: &'static str, +} + +#[async_trait::async_trait] +impl EmailData for ResetPassword { + async fn get_email_data(&self) -> CustomResult { + let token = EmailToken::new_token(self.recipient_email.clone(), &self.settings) + .await + .change_context(EmailError::TokenGenerationFailure)?; + + let reset_password_link = + get_link_with_token(&self.settings.server.base_url, token, "set_password"); + 
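
`get_link_with_token` above gives every outgoing email link the same `{base_url}/user/{action}/?token={token}` shape. A runnable sketch of that function plus a usage check, with an illustrative base URL and a truncated stand-in for the JWT that `EmailToken::new_token` produces:

```rust
// Copied shape of get_link_with_token from the hunk above.
fn get_link_with_token(
    base_url: impl std::fmt::Display,
    token: impl std::fmt::Display,
    action: impl std::fmt::Display,
) -> String {
    format!("{base_url}/user/{action}/?token={token}")
}

fn main() {
    // Illustrative base URL; the token here is a truncated stand-in for a JWT.
    let link = get_link_with_token("https://app.example.com", "eyJhbGci...", "verify_email");
    assert_eq!(link, "https://app.example.com/user/verify_email/?token=eyJhbGci...");
}
```
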
+ let body = html::get_html_body(EmailBody::Reset { + link: reset_password_link, + user_name: self.user_name.clone().get_secret().expose(), + }); + + Ok(EmailContents { + subject: self.subject.to_string(), + body: external_services::email::IntermediateString::new(body), + recipient: self.recipient_email.clone().into_inner(), + }) + } +} + +pub struct MagicLink { + pub recipient_email: domain::UserEmail, + pub user_name: domain::UserName, + pub settings: std::sync::Arc, + pub subject: &'static str, +} + +#[async_trait::async_trait] +impl EmailData for MagicLink { + async fn get_email_data(&self) -> CustomResult { + let token = EmailToken::new_token(self.recipient_email.clone(), &self.settings) + .await + .change_context(EmailError::TokenGenerationFailure)?; + + let magic_link_login = get_link_with_token(&self.settings.server.base_url, token, "login"); + + let body = html::get_html_body(EmailBody::MagicLink { + link: magic_link_login, + user_name: self.user_name.clone().get_secret().expose(), + }); + + Ok(EmailContents { + subject: self.subject.to_string(), + body: external_services::email::IntermediateString::new(body), + recipient: self.recipient_email.clone().into_inner(), + }) + } +} + +pub struct InviteUser { + pub recipient_email: domain::UserEmail, + pub user_name: domain::UserName, + pub settings: std::sync::Arc, + pub subject: &'static str, +} + +#[async_trait::async_trait] +impl EmailData for InviteUser { + async fn get_email_data(&self) -> CustomResult { + let token = EmailToken::new_token(self.recipient_email.clone(), &self.settings) + .await + .change_context(EmailError::TokenGenerationFailure)?; + + let invite_user_link = + get_link_with_token(&self.settings.server.base_url, token, "set_password"); + + let body = html::get_html_body(EmailBody::MagicLink { + link: invite_user_link, + user_name: self.user_name.clone().get_secret().expose(), + }); + + Ok(EmailContents { + subject: self.subject.to_string(), + body: external_services::email::IntermediateString::new(body), + recipient: self.recipient_email.clone().into_inner(), + }) + } +} diff --git a/crates/router/src/services/kafka.rs b/crates/router/src/services/kafka.rs new file mode 100644 index 000000000000..497ac16721b5 --- /dev/null +++ b/crates/router/src/services/kafka.rs @@ -0,0 +1,314 @@ +use std::sync::Arc; + +use common_utils::errors::CustomResult; +use error_stack::{report, IntoReport, ResultExt}; +use rdkafka::{ + config::FromClientConfig, + producer::{BaseRecord, DefaultProducerContext, Producer, ThreadedProducer}, +}; + +use crate::events::EventType; +mod api_event; +pub mod outgoing_request; +mod payment_attempt; +mod payment_intent; +mod refund; +pub use api_event::{ApiCallEventType, ApiEvents, ApiEventsType}; +use data_models::payments::{payment_attempt::PaymentAttempt, PaymentIntent}; +use diesel_models::refund::Refund; +use serde::Serialize; +use time::OffsetDateTime; + +use self::{ + payment_attempt::KafkaPaymentAttempt, payment_intent::KafkaPaymentIntent, refund::KafkaRefund, +}; +// Using message queue result here to avoid confusion with Kafka result provided by library +pub type MQResult = CustomResult; + +pub trait KafkaMessage +where + Self: Serialize, +{ + fn value(&self) -> MQResult> { + // Add better error logging here + serde_json::to_vec(&self) + .into_report() + .change_context(KafkaError::GenericError) + } + + fn key(&self) -> String; + + fn creation_timestamp(&self) -> Option { + None + } +} + +#[derive(serde::Serialize, Debug)] +struct KafkaEvent<'a, T: KafkaMessage> { + #[serde(flatten)] + 
event: &'a T, + sign_flag: i32, +} + +impl<'a, T: KafkaMessage> KafkaEvent<'a, T> { + fn new(event: &'a T) -> Self { + Self { + event, + sign_flag: 1, + } + } + fn old(event: &'a T) -> Self { + Self { + event, + sign_flag: -1, + } + } +} + +impl<'a, T: KafkaMessage> KafkaMessage for KafkaEvent<'a, T> { + fn key(&self) -> String { + self.event.key() + } + + fn creation_timestamp(&self) -> Option { + self.event.creation_timestamp() + } +} + +#[derive(Debug, serde::Deserialize, Clone, Default)] +#[serde(default)] +pub struct KafkaSettings { + brokers: Vec, + intent_analytics_topic: String, + attempt_analytics_topic: String, + refund_analytics_topic: String, + api_logs_topic: String, +} + +impl KafkaSettings { + pub fn validate(&self) -> Result<(), crate::core::errors::ApplicationError> { + use common_utils::ext_traits::ConfigExt; + + use crate::core::errors::ApplicationError; + + common_utils::fp_utils::when(self.brokers.is_empty(), || { + Err(ApplicationError::InvalidConfigurationValueError( + "Kafka brokers must not be empty".into(), + )) + })?; + + common_utils::fp_utils::when(self.intent_analytics_topic.is_default_or_empty(), || { + Err(ApplicationError::InvalidConfigurationValueError( + "Kafka Intent Analytics topic must not be empty".into(), + )) + })?; + + common_utils::fp_utils::when(self.attempt_analytics_topic.is_default_or_empty(), || { + Err(ApplicationError::InvalidConfigurationValueError( + "Kafka Attempt Analytics topic must not be empty".into(), + )) + })?; + + common_utils::fp_utils::when(self.refund_analytics_topic.is_default_or_empty(), || { + Err(ApplicationError::InvalidConfigurationValueError( + "Kafka Refund Analytics topic must not be empty".into(), + )) + })?; + + common_utils::fp_utils::when(self.api_logs_topic.is_default_or_empty(), || { + Err(ApplicationError::InvalidConfigurationValueError( + "Kafka API event Analytics topic must not be empty".into(), + )) + }) + } +} + +#[derive(Clone, Debug)] +pub struct KafkaProducer { + producer: Arc, + intent_analytics_topic: String, + attempt_analytics_topic: String, + refund_analytics_topic: String, + api_logs_topic: String, +} + +struct RdKafkaProducer(ThreadedProducer); + +impl std::fmt::Debug for RdKafkaProducer { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("RdKafkaProducer") + } +} + +#[derive(Debug, Clone, thiserror::Error)] +pub enum KafkaError { + #[error("Generic Kafka Error")] + GenericError, + #[error("Kafka not implemented")] + NotImplemented, + #[error("Kafka Initialization Error")] + InitializationError, +} + +#[allow(unused)] +impl KafkaProducer { + pub async fn create(conf: &KafkaSettings) -> MQResult { + Ok(Self { + producer: Arc::new(RdKafkaProducer( + ThreadedProducer::from_config( + rdkafka::ClientConfig::new().set("bootstrap.servers", conf.brokers.join(",")), + ) + .into_report() + .change_context(KafkaError::InitializationError)?, + )), + + intent_analytics_topic: conf.intent_analytics_topic.clone(), + attempt_analytics_topic: conf.attempt_analytics_topic.clone(), + refund_analytics_topic: conf.refund_analytics_topic.clone(), + api_logs_topic: conf.api_logs_topic.clone(), + }) + } + + pub fn log_kafka_event( + &self, + topic: &str, + event: &T, + ) -> MQResult<()> { + router_env::logger::debug!("Logging Kafka Event {event:?}"); + self.producer + .0 + .send( + BaseRecord::to(topic) + .key(&event.key()) + .payload(&event.value()?) 
+ .timestamp( + event + .creation_timestamp() + .unwrap_or_else(|| OffsetDateTime::now_utc().unix_timestamp()), + ), + ) + .map_err(|(error, record)| report!(error).attach_printable(format!("{record:?}"))) + .change_context(KafkaError::GenericError) + } + + pub async fn log_payment_attempt( + &self, + attempt: &PaymentAttempt, + old_attempt: Option, + ) -> MQResult<()> { + if let Some(negative_event) = old_attempt { + self.log_kafka_event( + &self.attempt_analytics_topic, + &KafkaEvent::old(&KafkaPaymentAttempt::from_storage(&negative_event)), + ) + .attach_printable_lazy(|| { + format!("Failed to add negative attempt event {negative_event:?}") + })?; + }; + self.log_kafka_event( + &self.attempt_analytics_topic, + &KafkaEvent::new(&KafkaPaymentAttempt::from_storage(attempt)), + ) + .attach_printable_lazy(|| format!("Failed to add positive attempt event {attempt:?}")) + } + + pub async fn log_payment_attempt_delete( + &self, + delete_old_attempt: &PaymentAttempt, + ) -> MQResult<()> { + self.log_kafka_event( + &self.attempt_analytics_topic, + &KafkaEvent::old(&KafkaPaymentAttempt::from_storage(delete_old_attempt)), + ) + .attach_printable_lazy(|| { + format!("Failed to add negative attempt event {delete_old_attempt:?}") + }) + } + + pub async fn log_payment_intent( + &self, + intent: &PaymentIntent, + old_intent: Option, + ) -> MQResult<()> { + if let Some(negative_event) = old_intent { + self.log_kafka_event( + &self.intent_analytics_topic, + &KafkaEvent::old(&KafkaPaymentIntent::from_storage(&negative_event)), + ) + .attach_printable_lazy(|| { + format!("Failed to add negative intent event {negative_event:?}") + })?; + }; + self.log_kafka_event( + &self.intent_analytics_topic, + &KafkaEvent::new(&KafkaPaymentIntent::from_storage(intent)), + ) + .attach_printable_lazy(|| format!("Failed to add positive intent event {intent:?}")) + } + + pub async fn log_payment_intent_delete( + &self, + delete_old_intent: &PaymentIntent, + ) -> MQResult<()> { + self.log_kafka_event( + &self.intent_analytics_topic, + &KafkaEvent::old(&KafkaPaymentIntent::from_storage(delete_old_intent)), + ) + .attach_printable_lazy(|| { + format!("Failed to add negative intent event {delete_old_intent:?}") + }) + } + + pub async fn log_refund(&self, refund: &Refund, old_refund: Option) -> MQResult<()> { + if let Some(negative_event) = old_refund { + self.log_kafka_event( + &self.refund_analytics_topic, + &KafkaEvent::old(&KafkaRefund::from_storage(&negative_event)), + ) + .attach_printable_lazy(|| { + format!("Failed to add negative refund event {negative_event:?}") + })?; + }; + self.log_kafka_event( + &self.refund_analytics_topic, + &KafkaEvent::new(&KafkaRefund::from_storage(refund)), + ) + .attach_printable_lazy(|| format!("Failed to add positive refund event {refund:?}")) + } + + pub async fn log_refund_delete(&self, delete_old_refund: &Refund) -> MQResult<()> { + self.log_kafka_event( + &self.refund_analytics_topic, + &KafkaEvent::old(&KafkaRefund::from_storage(delete_old_refund)), + ) + .attach_printable_lazy(|| { + format!("Failed to add negative refund event {delete_old_refund:?}") + }) + } + + pub async fn log_api_event(&self, event: &ApiEvents) -> MQResult<()> { + self.log_kafka_event(&self.api_logs_topic, event) + .attach_printable_lazy(|| format!("Failed to add api log event {event:?}")) + } + + pub fn get_topic(&self, event: EventType) -> &str { + match event { + EventType::ApiLogs => &self.api_logs_topic, + EventType::PaymentAttempt => &self.attempt_analytics_topic, + EventType::PaymentIntent => 
&self.intent_analytics_topic, + EventType::Refund => &self.refund_analytics_topic, + } + } +} + +impl Drop for RdKafkaProducer { + fn drop(&mut self) { + // Flush the producer to send any pending messages + match self.0.flush(rdkafka::util::Timeout::After( + std::time::Duration::from_secs(5), + )) { + Ok(_) => router_env::logger::info!("Kafka events flush Successful"), + Err(error) => router_env::logger::error!("Failed to flush Kafka Events {error:?}"), + } + } +} diff --git a/crates/router/src/services/kafka/api_event.rs b/crates/router/src/services/kafka/api_event.rs new file mode 100644 index 000000000000..7de271915927 --- /dev/null +++ b/crates/router/src/services/kafka/api_event.rs @@ -0,0 +1,108 @@ +use api_models::enums as api_enums; +use serde::{Deserialize, Serialize}; +use time::OffsetDateTime; + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(tag = "flow_type")] +pub enum ApiEventsType { + Payment { + payment_id: String, + }, + Refund { + payment_id: String, + refund_id: String, + }, + Default, + PaymentMethod { + payment_method_id: String, + payment_method: Option, + payment_method_type: Option, + }, + Customer { + customer_id: String, + }, + User { + //specified merchant_id will overridden on global defined + merchant_id: String, + user_id: String, + }, + Webhooks { + connector: String, + payment_id: Option, + }, + OutgoingEvent, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub struct ApiEvents { + pub api_name: String, + pub request_id: Option, + //It is require to solve ambiquity in case of event_type is User + #[serde(skip_serializing_if = "Option::is_none")] + pub merchant_id: Option, + pub request: String, + pub response: String, + pub status_code: u16, + #[serde(with = "time::serde::timestamp")] + pub created_at: OffsetDateTime, + pub latency: u128, + //conflicting fields underlying enums will be used + #[serde(flatten)] + pub event_type: ApiEventsType, + pub user_agent: Option, + pub ip_addr: Option, + pub url_path: Option, + pub api_event_type: Option, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub enum ApiCallEventType { + IncomingApiEvent, + OutgoingApiEvent, +} + +impl super::KafkaMessage for ApiEvents { + fn key(&self) -> String { + match &self.event_type { + ApiEventsType::Payment { payment_id } => format!( + "{}_{}", + self.merchant_id + .as_ref() + .unwrap_or(&"default_merchant_id".to_string()), + payment_id + ), + ApiEventsType::Refund { + payment_id, + refund_id, + } => format!("{payment_id}_{refund_id}"), + ApiEventsType::Default => "key".to_string(), + ApiEventsType::PaymentMethod { + payment_method_id, + payment_method, + payment_method_type, + } => format!( + "{:?}_{:?}_{:?}", + payment_method_id.clone(), + payment_method.clone(), + payment_method_type.clone(), + ), + ApiEventsType::Customer { customer_id } => customer_id.to_string(), + ApiEventsType::User { + merchant_id, + user_id, + } => format!("{}_{}", merchant_id, user_id), + ApiEventsType::Webhooks { + connector, + payment_id, + } => format!( + "webhook_{}_{connector}", + payment_id.clone().unwrap_or_default() + ), + ApiEventsType::OutgoingEvent => "outgoing_event".to_string(), + } + } + + fn creation_timestamp(&self) -> Option { + Some(self.created_at.unix_timestamp()) + } +} diff --git a/crates/router/src/services/kafka/outgoing_request.rs b/crates/router/src/services/kafka/outgoing_request.rs new file mode 100644 index 000000000000..bb09fe91fe6d --- /dev/null +++ b/crates/router/src/services/kafka/outgoing_request.rs @@ 
-0,0 +1,19 @@ +use reqwest::Url; + +pub struct OutgoingRequest { + pub url: Url, + pub latency: u128, +} + +// impl super::KafkaMessage for OutgoingRequest { +// fn key(&self) -> String { +// format!( +// "{}_{}", + +// ) +// } + +// fn creation_timestamp(&self) -> Option { +// Some(self.created_at.unix_timestamp()) +// } +// } diff --git a/crates/router/src/services/kafka/payment_attempt.rs b/crates/router/src/services/kafka/payment_attempt.rs new file mode 100644 index 000000000000..ea0721f418e5 --- /dev/null +++ b/crates/router/src/services/kafka/payment_attempt.rs @@ -0,0 +1,92 @@ +use data_models::payments::payment_attempt::PaymentAttempt; +use diesel_models::enums as storage_enums; +use time::OffsetDateTime; + +#[derive(serde::Serialize, Debug)] +pub struct KafkaPaymentAttempt<'a> { + pub payment_id: &'a String, + pub merchant_id: &'a String, + pub attempt_id: &'a String, + pub status: storage_enums::AttemptStatus, + pub amount: i64, + pub currency: Option, + pub save_to_locker: Option, + pub connector: Option<&'a String>, + pub error_message: Option<&'a String>, + pub offer_amount: Option, + pub surcharge_amount: Option, + pub tax_amount: Option, + pub payment_method_id: Option<&'a String>, + pub payment_method: Option, + pub connector_transaction_id: Option<&'a String>, + pub capture_method: Option, + #[serde(default, with = "time::serde::timestamp::option")] + pub capture_on: Option, + pub confirm: bool, + pub authentication_type: Option, + #[serde(with = "time::serde::timestamp")] + pub created_at: OffsetDateTime, + #[serde(with = "time::serde::timestamp")] + pub modified_at: OffsetDateTime, + #[serde(default, with = "time::serde::timestamp::option")] + pub last_synced: Option, + pub cancellation_reason: Option<&'a String>, + pub amount_to_capture: Option, + pub mandate_id: Option<&'a String>, + pub browser_info: Option, + pub error_code: Option<&'a String>, + pub connector_metadata: Option, + // TODO: These types should implement copy ideally + pub payment_experience: Option<&'a storage_enums::PaymentExperience>, + pub payment_method_type: Option<&'a storage_enums::PaymentMethodType>, +} + +impl<'a> KafkaPaymentAttempt<'a> { + pub fn from_storage(attempt: &'a PaymentAttempt) -> Self { + Self { + payment_id: &attempt.payment_id, + merchant_id: &attempt.merchant_id, + attempt_id: &attempt.attempt_id, + status: attempt.status, + amount: attempt.amount, + currency: attempt.currency, + save_to_locker: attempt.save_to_locker, + connector: attempt.connector.as_ref(), + error_message: attempt.error_message.as_ref(), + offer_amount: attempt.offer_amount, + surcharge_amount: attempt.surcharge_amount, + tax_amount: attempt.tax_amount, + payment_method_id: attempt.payment_method_id.as_ref(), + payment_method: attempt.payment_method, + connector_transaction_id: attempt.connector_transaction_id.as_ref(), + capture_method: attempt.capture_method, + capture_on: attempt.capture_on.map(|i| i.assume_utc()), + confirm: attempt.confirm, + authentication_type: attempt.authentication_type, + created_at: attempt.created_at.assume_utc(), + modified_at: attempt.modified_at.assume_utc(), + last_synced: attempt.last_synced.map(|i| i.assume_utc()), + cancellation_reason: attempt.cancellation_reason.as_ref(), + amount_to_capture: attempt.amount_to_capture, + mandate_id: attempt.mandate_id.as_ref(), + browser_info: attempt.browser_info.as_ref().map(|v| v.to_string()), + error_code: attempt.error_code.as_ref(), + connector_metadata: attempt.connector_metadata.as_ref().map(|v| v.to_string()), + 
payment_experience: attempt.payment_experience.as_ref(), + payment_method_type: attempt.payment_method_type.as_ref(), + } + } +} + +impl<'a> super::KafkaMessage for KafkaPaymentAttempt<'a> { + fn key(&self) -> String { + format!( + "{}_{}_{}", + self.merchant_id, self.payment_id, self.attempt_id + ) + } + + fn creation_timestamp(&self) -> Option { + Some(self.modified_at.unix_timestamp()) + } +} diff --git a/crates/router/src/services/kafka/payment_intent.rs b/crates/router/src/services/kafka/payment_intent.rs new file mode 100644 index 000000000000..70980a6e8652 --- /dev/null +++ b/crates/router/src/services/kafka/payment_intent.rs @@ -0,0 +1,71 @@ +use data_models::payments::PaymentIntent; +use diesel_models::enums as storage_enums; +use time::OffsetDateTime; + +#[derive(serde::Serialize, Debug)] +pub struct KafkaPaymentIntent<'a> { + pub payment_id: &'a String, + pub merchant_id: &'a String, + pub status: storage_enums::IntentStatus, + pub amount: i64, + pub currency: Option, + pub amount_captured: Option, + pub customer_id: Option<&'a String>, + pub description: Option<&'a String>, + pub return_url: Option<&'a String>, + pub connector_id: Option<&'a String>, + pub statement_descriptor_name: Option<&'a String>, + pub statement_descriptor_suffix: Option<&'a String>, + #[serde(with = "time::serde::timestamp")] + pub created_at: OffsetDateTime, + #[serde(with = "time::serde::timestamp")] + pub modified_at: OffsetDateTime, + #[serde(default, with = "time::serde::timestamp::option")] + pub last_synced: Option, + pub setup_future_usage: Option, + pub off_session: Option, + pub client_secret: Option<&'a String>, + pub active_attempt_id: String, + pub business_country: Option, + pub business_label: Option<&'a String>, + pub attempt_count: i16, +} + +impl<'a> KafkaPaymentIntent<'a> { + pub fn from_storage(intent: &'a PaymentIntent) -> Self { + Self { + payment_id: &intent.payment_id, + merchant_id: &intent.merchant_id, + status: intent.status, + amount: intent.amount, + currency: intent.currency, + amount_captured: intent.amount_captured, + customer_id: intent.customer_id.as_ref(), + description: intent.description.as_ref(), + return_url: intent.return_url.as_ref(), + connector_id: intent.connector_id.as_ref(), + statement_descriptor_name: intent.statement_descriptor_name.as_ref(), + statement_descriptor_suffix: intent.statement_descriptor_suffix.as_ref(), + created_at: intent.created_at.assume_utc(), + modified_at: intent.modified_at.assume_utc(), + last_synced: intent.last_synced.map(|i| i.assume_utc()), + setup_future_usage: intent.setup_future_usage, + off_session: intent.off_session, + client_secret: intent.client_secret.as_ref(), + active_attempt_id: intent.active_attempt.get_id(), + business_country: intent.business_country, + business_label: intent.business_label.as_ref(), + attempt_count: intent.attempt_count, + } + } +} + +impl<'a> super::KafkaMessage for KafkaPaymentIntent<'a> { + fn key(&self) -> String { + format!("{}_{}", self.merchant_id, self.payment_id) + } + + fn creation_timestamp(&self) -> Option { + Some(self.modified_at.unix_timestamp()) + } +} diff --git a/crates/router/src/services/kafka/refund.rs b/crates/router/src/services/kafka/refund.rs new file mode 100644 index 000000000000..0cc4865e7512 --- /dev/null +++ b/crates/router/src/services/kafka/refund.rs @@ -0,0 +1,68 @@ +use diesel_models::{enums as storage_enums, refund::Refund}; +use time::OffsetDateTime; + +#[derive(serde::Serialize, Debug)] +pub struct KafkaRefund<'a> { + pub internal_reference_id: &'a String, + 
pub refund_id: &'a String, //merchant_reference id + pub payment_id: &'a String, + pub merchant_id: &'a String, + pub connector_transaction_id: &'a String, + pub connector: &'a String, + pub connector_refund_id: Option<&'a String>, + pub external_reference_id: Option<&'a String>, + pub refund_type: &'a storage_enums::RefundType, + pub total_amount: &'a i64, + pub currency: &'a storage_enums::Currency, + pub refund_amount: &'a i64, + pub refund_status: &'a storage_enums::RefundStatus, + pub sent_to_gateway: &'a bool, + pub refund_error_message: Option<&'a String>, + pub refund_arn: Option<&'a String>, + #[serde(default, with = "time::serde::timestamp")] + pub created_at: OffsetDateTime, + #[serde(default, with = "time::serde::timestamp")] + pub modified_at: OffsetDateTime, + pub description: Option<&'a String>, + pub attempt_id: &'a String, + pub refund_reason: Option<&'a String>, + pub refund_error_code: Option<&'a String>, +} + +impl<'a> KafkaRefund<'a> { + pub fn from_storage(refund: &'a Refund) -> Self { + Self { + internal_reference_id: &refund.internal_reference_id, + refund_id: &refund.refund_id, + payment_id: &refund.payment_id, + merchant_id: &refund.merchant_id, + connector_transaction_id: &refund.connector_transaction_id, + connector: &refund.connector, + connector_refund_id: refund.connector_refund_id.as_ref(), + external_reference_id: refund.external_reference_id.as_ref(), + refund_type: &refund.refund_type, + total_amount: &refund.total_amount, + currency: &refund.currency, + refund_amount: &refund.refund_amount, + refund_status: &refund.refund_status, + sent_to_gateway: &refund.sent_to_gateway, + refund_error_message: refund.refund_error_message.as_ref(), + refund_arn: refund.refund_arn.as_ref(), + created_at: refund.created_at.assume_utc(), + modified_at: refund.updated_at.assume_utc(), + description: refund.description.as_ref(), + attempt_id: &refund.attempt_id, + refund_reason: refund.refund_reason.as_ref(), + refund_error_code: refund.refund_error_code.as_ref(), + } + } +} + +impl<'a> super::KafkaMessage for KafkaRefund<'a> { + fn key(&self) -> String { + format!( + "{}_{}_{}_{}", + self.merchant_id, self.payment_id, self.attempt_id, self.refund_id + ) + } +} diff --git a/crates/router/src/types.rs b/crates/router/src/types.rs index cd37fbb549d9..c267a54cc57b 100644 --- a/crates/router/src/types.rs +++ b/crates/router/src/types.rs @@ -33,7 +33,7 @@ use crate::{ payments::{PaymentData, RecurringMandatePaymentData}, }, services, - types::storage::payment_attempt::PaymentAttemptExt, + types::{storage::payment_attempt::PaymentAttemptExt, transformers::ForeignFrom}, utils::OptionExt, }; @@ -381,6 +381,7 @@ pub struct PaymentsAuthorizeData { pub payment_method_type: Option, pub surcharge_details: Option, pub customer_id: Option, + pub request_incremental_authorization: bool, } #[derive(Debug, Clone, Default)] @@ -536,6 +537,7 @@ pub struct SetupMandateRequestData { pub email: Option, pub return_url: Option, pub payment_method_type: Option, + pub request_incremental_authorization: bool, } #[derive(Debug, Clone)] @@ -669,6 +671,7 @@ pub enum PaymentsResponseData { connector_metadata: Option, network_txn_id: Option, connector_response_reference_id: Option, + incremental_authorization_allowed: Option, }, MultipleCaptureResponse { // pending_capture_id_list: Vec, @@ -942,6 +945,78 @@ pub enum ConnectorAuthType { NoKey, } +impl From for ConnectorAuthType { + fn from(value: api_models::admin::ConnectorAuthType) -> Self { + match value { + 
api_models::admin::ConnectorAuthType::TemporaryAuth => Self::TemporaryAuth, + api_models::admin::ConnectorAuthType::HeaderKey { api_key } => { + Self::HeaderKey { api_key } + } + api_models::admin::ConnectorAuthType::BodyKey { api_key, key1 } => { + Self::BodyKey { api_key, key1 } + } + api_models::admin::ConnectorAuthType::SignatureKey { + api_key, + key1, + api_secret, + } => Self::SignatureKey { + api_key, + key1, + api_secret, + }, + api_models::admin::ConnectorAuthType::MultiAuthKey { + api_key, + key1, + api_secret, + key2, + } => Self::MultiAuthKey { + api_key, + key1, + api_secret, + key2, + }, + api_models::admin::ConnectorAuthType::CurrencyAuthKey { auth_key_map } => { + Self::CurrencyAuthKey { auth_key_map } + } + api_models::admin::ConnectorAuthType::NoKey => Self::NoKey, + } + } +} + +impl ForeignFrom for api_models::admin::ConnectorAuthType { + fn foreign_from(from: ConnectorAuthType) -> Self { + match from { + ConnectorAuthType::TemporaryAuth => Self::TemporaryAuth, + ConnectorAuthType::HeaderKey { api_key } => Self::HeaderKey { api_key }, + ConnectorAuthType::BodyKey { api_key, key1 } => Self::BodyKey { api_key, key1 }, + ConnectorAuthType::SignatureKey { + api_key, + key1, + api_secret, + } => Self::SignatureKey { + api_key, + key1, + api_secret, + }, + ConnectorAuthType::MultiAuthKey { + api_key, + key1, + api_secret, + key2, + } => Self::MultiAuthKey { + api_key, + key1, + api_secret, + key2, + }, + ConnectorAuthType::CurrencyAuthKey { auth_key_map } => { + Self::CurrencyAuthKey { auth_key_map } + } + ConnectorAuthType::NoKey => Self::NoKey, + } + } +} + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct ConnectorsList { pub connectors: Vec, @@ -1128,6 +1203,7 @@ impl From<&SetupMandateRouterData> for PaymentsAuthorizeData { payment_method_type: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: data.request.request_incremental_authorization, } } } diff --git a/crates/router/src/types/api.rs b/crates/router/src/types/api.rs index bcb3a9add553..96bcaca3ed5d 100644 --- a/crates/router/src/types/api.rs +++ b/crates/router/src/types/api.rs @@ -13,6 +13,8 @@ pub mod payments; pub mod payouts; pub mod refunds; pub mod routing; +#[cfg(feature = "olap")] +pub mod verify_connector; pub mod webhooks; use std::{fmt::Debug, str::FromStr}; diff --git a/crates/router/src/types/api/admin.rs b/crates/router/src/types/api/admin.rs index 6bbe9149f4d7..fe99d084223a 100644 --- a/crates/router/src/types/api/admin.rs +++ b/crates/router/src/types/api/admin.rs @@ -124,9 +124,10 @@ impl ForeignTryFrom<(domain::MerchantAccount, BusinessProfileCreate)> .unwrap_or(merchant_account.redirect_to_merchant_with_http_post), webhook_details: webhook_details.or(merchant_account.webhook_details), metadata: request.metadata, - routing_algorithm: request - .routing_algorithm - .or(merchant_account.routing_algorithm), + routing_algorithm: Some(serde_json::json!({ + "algorithm_id": null, + "timestamp": 0 + })), intent_fulfillment_time: request .intent_fulfillment_time .map(i64::from) diff --git a/crates/router/src/types/api/verify_connector.rs b/crates/router/src/types/api/verify_connector.rs new file mode 100644 index 000000000000..74b15f911b9a --- /dev/null +++ b/crates/router/src/types/api/verify_connector.rs @@ -0,0 +1,182 @@ +pub mod paypal; +pub mod stripe; + +use error_stack::{IntoReport, ResultExt}; + +use crate::{ + consts, + core::errors, + services, + services::ConnectorIntegration, + types::{self, api, storage::enums as storage_enums}, + 
AppState, +}; + +#[derive(Clone, Debug)] +pub struct VerifyConnectorData { + pub connector: &'static (dyn types::api::Connector + Sync), + pub connector_auth: types::ConnectorAuthType, + pub card_details: api::Card, +} + +impl VerifyConnectorData { + fn get_payment_authorize_data(&self) -> types::PaymentsAuthorizeData { + types::PaymentsAuthorizeData { + payment_method_data: api::PaymentMethodData::Card(self.card_details.clone()), + email: None, + amount: 1000, + confirm: true, + currency: storage_enums::Currency::USD, + mandate_id: None, + webhook_url: None, + customer_id: None, + off_session: None, + browser_info: None, + session_token: None, + order_details: None, + order_category: None, + capture_method: None, + enrolled_for_3ds: false, + router_return_url: None, + surcharge_details: None, + setup_future_usage: None, + payment_experience: None, + payment_method_type: None, + statement_descriptor: None, + setup_mandate_details: None, + complete_authorize_url: None, + related_transaction_id: None, + statement_descriptor_suffix: None, + request_incremental_authorization: false, + } + } + + fn get_router_data( + &self, + request_data: R1, + access_token: Option, + ) -> types::RouterData { + let attempt_id = + common_utils::generate_id_with_default_len(consts::VERIFY_CONNECTOR_ID_PREFIX); + types::RouterData { + flow: std::marker::PhantomData, + status: storage_enums::AttemptStatus::Started, + request: request_data, + response: Err(errors::ApiErrorResponse::InternalServerError.into()), + connector: self.connector.id().to_string(), + auth_type: storage_enums::AuthenticationType::NoThreeDs, + test_mode: None, + return_url: None, + attempt_id: attempt_id.clone(), + description: None, + customer_id: None, + merchant_id: consts::VERIFY_CONNECTOR_MERCHANT_ID.to_string(), + reference_id: None, + access_token, + session_token: None, + payment_method: storage_enums::PaymentMethod::Card, + amount_captured: None, + preprocessing_id: None, + payment_method_id: None, + connector_customer: None, + connector_auth_type: self.connector_auth.clone(), + connector_meta_data: None, + payment_method_token: None, + connector_api_version: None, + recurring_mandate_payment_data: None, + connector_request_reference_id: attempt_id, + address: types::PaymentAddress { + shipping: None, + billing: None, + }, + payment_id: common_utils::generate_id_with_default_len( + consts::VERIFY_CONNECTOR_ID_PREFIX, + ), + #[cfg(feature = "payouts")] + payout_method_data: None, + #[cfg(feature = "payouts")] + quote_id: None, + payment_method_balance: None, + connector_http_status_code: None, + external_latency: None, + apple_pay_flow: None, + } + } +} + +#[async_trait::async_trait] +pub trait VerifyConnector { + async fn verify( + state: &AppState, + connector_data: VerifyConnectorData, + ) -> errors::RouterResponse<()> { + let authorize_data = connector_data.get_payment_authorize_data(); + let access_token = Self::get_access_token(state, connector_data.clone()).await?; + let router_data = connector_data.get_router_data(authorize_data, access_token); + + let request = connector_data + .connector + .build_request(&router_data, &state.conf.connectors) + .change_context(errors::ApiErrorResponse::InvalidRequestData { + message: "Payment request cannot be built".to_string(), + })? 
+ .ok_or(errors::ApiErrorResponse::InternalServerError)?; + + let response = services::call_connector_api(&state.to_owned(), request) + .await + .change_context(errors::ApiErrorResponse::InternalServerError)?; + + match response { + Ok(_) => Ok(services::ApplicationResponse::StatusOk), + Err(error_response) => { + Self::handle_payment_error_response::< + api::Authorize, + types::PaymentsAuthorizeData, + types::PaymentsResponseData, + >(connector_data.connector, error_response) + .await + } + } + } + + async fn get_access_token( + _state: &AppState, + _connector_data: VerifyConnectorData, + ) -> errors::CustomResult, errors::ApiErrorResponse> { + // AccessToken is None for the connectors without the AccessToken Flow. + // If a connector has that, then it should override this implementation. + Ok(None) + } + + async fn handle_payment_error_response( + connector: &(dyn types::api::Connector + Sync), + error_response: types::Response, + ) -> errors::RouterResponse<()> + where + dyn types::api::Connector + Sync: ConnectorIntegration, + { + let error = connector + .get_error_response(error_response) + .change_context(errors::ApiErrorResponse::InternalServerError)?; + Err(errors::ApiErrorResponse::InvalidRequestData { + message: error.reason.unwrap_or(error.message), + }) + .into_report() + } + + async fn handle_access_token_error_response( + connector: &(dyn types::api::Connector + Sync), + error_response: types::Response, + ) -> errors::RouterResult> + where + dyn types::api::Connector + Sync: ConnectorIntegration, + { + let error = connector + .get_error_response(error_response) + .change_context(errors::ApiErrorResponse::InternalServerError)?; + Err(errors::ApiErrorResponse::InvalidRequestData { + message: error.reason.unwrap_or(error.message), + }) + .into_report() + } +} diff --git a/crates/router/src/types/api/verify_connector/paypal.rs b/crates/router/src/types/api/verify_connector/paypal.rs new file mode 100644 index 000000000000..33e848f909df --- /dev/null +++ b/crates/router/src/types/api/verify_connector/paypal.rs @@ -0,0 +1,54 @@ +use error_stack::ResultExt; + +use super::{VerifyConnector, VerifyConnectorData}; +use crate::{ + connector, + core::errors, + routes::AppState, + services, + types::{self, api}, +}; + +#[async_trait::async_trait] +impl VerifyConnector for connector::Paypal { + async fn get_access_token( + state: &AppState, + connector_data: VerifyConnectorData, + ) -> errors::CustomResult, errors::ApiErrorResponse> { + let token_data: types::AccessTokenRequestData = + connector_data.connector_auth.clone().try_into()?; + let router_data = connector_data.get_router_data(token_data, None); + + let request = connector_data + .connector + .build_request(&router_data, &state.conf.connectors) + .change_context(errors::ApiErrorResponse::InvalidRequestData { + message: "Payment request cannot be built".to_string(), + })? + .ok_or(errors::ApiErrorResponse::InternalServerError)?; + + let response = services::call_connector_api(&state.to_owned(), request) + .await + .change_context(errors::ApiErrorResponse::InternalServerError)?; + + match response { + Ok(res) => Some( + connector_data + .connector + .handle_response(&router_data, res) + .change_context(errors::ApiErrorResponse::InternalServerError)? 
+ .response + .map_err(|_| errors::ApiErrorResponse::InternalServerError.into()), + ) + .transpose(), + Err(response_data) => { + Self::handle_access_token_error_response::< + api::AccessTokenAuth, + types::AccessTokenRequestData, + types::AccessToken, + >(connector_data.connector, response_data) + .await + } + } + } +} diff --git a/crates/router/src/types/api/verify_connector/stripe.rs b/crates/router/src/types/api/verify_connector/stripe.rs new file mode 100644 index 000000000000..ece9fa15a1d9 --- /dev/null +++ b/crates/router/src/types/api/verify_connector/stripe.rs @@ -0,0 +1,36 @@ +use error_stack::{IntoReport, ResultExt}; +use router_env::env; + +use super::VerifyConnector; +use crate::{ + connector, + core::errors, + services::{self, ConnectorIntegration}, + types, +}; + +#[async_trait::async_trait] +impl VerifyConnector for connector::Stripe { + async fn handle_payment_error_response( + connector: &(dyn types::api::Connector + Sync), + error_response: types::Response, + ) -> errors::RouterResponse<()> + where + dyn types::api::Connector + Sync: ConnectorIntegration, + { + let error = connector + .get_error_response(error_response) + .change_context(errors::ApiErrorResponse::InternalServerError)?; + match (env::which(), error.code.as_str()) { + // In situations where an attempt is made to process a payment using a + // Stripe production key along with a test card (which verify_connector is using), + // Stripe will respond with a "card_declined" error. In production, + // when this scenario occurs we will send back an "Ok" response. + (env::Env::Production, "card_declined") => Ok(services::ApplicationResponse::StatusOk), + _ => Err(errors::ApiErrorResponse::InvalidRequestData { + message: error.reason.unwrap_or(error.message), + }) + .into_report(), + } + } +} diff --git a/crates/router/src/types/domain/user.rs b/crates/router/src/types/domain/user.rs index c053b0f15448..082b29d80941 100644 --- a/crates/router/src/types/domain/user.rs +++ b/crates/router/src/types/domain/user.rs @@ -1,6 +1,8 @@ use std::{collections::HashSet, ops, str::FromStr}; -use api_models::{admin as admin_api, organization as api_org, user as user_api}; +use api_models::{ + admin as admin_api, organization as api_org, user as user_api, user_role as user_role_api, +}; use common_utils::pii; use diesel_models::{ enums::UserStatus, @@ -12,21 +14,27 @@ use diesel_models::{ use error_stack::{IntoReport, ResultExt}; use masking::{ExposeInterface, PeekInterface, Secret}; use once_cell::sync::Lazy; +use router_env::env; use unicode_segmentation::UnicodeSegmentation; use crate::{ - consts::user as consts, + consts, core::{ admin, errors::{UserErrors, UserResult}, }, db::StorageInterface, routes::AppState, - services::authentication::AuthToken, + services::{ + authentication::{AuthToken, UserFromToken}, + authorization::{info, predefined_permissions}, + }, types::transformers::ForeignFrom, utils::user::password, }; +pub mod dashboard_metadata; + #[derive(Clone)] pub struct UserName(Secret); @@ -34,7 +42,7 @@ impl UserName { pub fn new(name: Secret) -> UserResult { let name = name.expose(); let is_empty_or_whitespace = name.trim().is_empty(); - let is_too_long = name.graphemes(true).count() > consts::MAX_NAME_LENGTH; + let is_too_long = name.graphemes(true).count() > consts::user::MAX_NAME_LENGTH; let forbidden_characters = ['/', '(', ')', '"', '<', '>', '\\', '{', '}']; let contains_forbidden_characters = name.chars().any(|g| forbidden_characters.contains(&g)); @@ -165,7 +173,8 @@ impl UserCompanyName { pub fn 
new(company_name: String) -> UserResult { let company_name = company_name.trim(); let is_empty_or_whitespace = company_name.is_empty(); - let is_too_long = company_name.graphemes(true).count() > consts::MAX_COMPANY_NAME_LENGTH; + let is_too_long = + company_name.graphemes(true).count() > consts::user::MAX_COMPANY_NAME_LENGTH; let is_all_valid_characters = company_name .chars() @@ -214,9 +223,47 @@ impl From for NewUserOrganization { } } +impl From for NewUserOrganization { + fn from(_value: user_api::CreateInternalUserRequest) -> Self { + let new_organization = api_org::OrganizationNew::new(None); + let db_organization = ForeignFrom::foreign_from(new_organization); + Self(db_organization) + } +} + +impl From for NewUserOrganization { + fn from(value: UserMerchantCreateRequestWithToken) -> Self { + Self(diesel_org::OrganizationNew { + org_id: value.2.org_id, + org_name: Some(value.1.company_name), + }) + } +} + +#[derive(Clone)] +pub struct MerchantId(String); + +impl MerchantId { + pub fn new(merchant_id: String) -> UserResult { + let merchant_id = merchant_id.trim().to_lowercase().replace(' ', "_"); + let is_empty_or_whitespace = merchant_id.is_empty(); + + let is_all_valid_characters = merchant_id.chars().all(|x| x.is_alphanumeric() || x == '_'); + if is_empty_or_whitespace || !is_all_valid_characters { + Err(UserErrors::MerchantIdParsingError.into()) + } else { + Ok(Self(merchant_id.to_string())) + } + } + + pub fn get_secret(&self) -> String { + self.0.clone() + } +} + #[derive(Clone)] pub struct NewUserMerchant { - merchant_id: String, + merchant_id: MerchantId, company_name: Option, new_organization: NewUserOrganization, } @@ -227,7 +274,7 @@ impl NewUserMerchant { } pub fn get_merchant_id(&self) -> String { - self.merchant_id.clone() + self.merchant_id.get_secret() } pub fn get_new_organization(&self) -> NewUserOrganization { @@ -291,7 +338,10 @@ impl TryFrom for NewUserMerchant { type Error = error_stack::Report; fn try_from(value: user_api::ConnectAccountRequest) -> UserResult { - let merchant_id = format!("merchant_{}", common_utils::date_time::now_unix_timestamp()); + let merchant_id = MerchantId::new(format!( + "merchant_{}", + common_utils::date_time::now_unix_timestamp() + ))?; let new_organization = NewUserOrganization::from(value); Ok(Self { @@ -302,6 +352,45 @@ impl TryFrom for NewUserMerchant { } } +impl TryFrom for NewUserMerchant { + type Error = error_stack::Report; + + fn try_from(value: user_api::CreateInternalUserRequest) -> UserResult { + let merchant_id = + MerchantId::new(consts::user_role::INTERNAL_USER_MERCHANT_ID.to_string())?; + let new_organization = NewUserOrganization::from(value); + + Ok(Self { + company_name: None, + merchant_id, + new_organization, + }) + } +} + +type UserMerchantCreateRequestWithToken = + (UserFromStorage, user_api::UserMerchantCreate, UserFromToken); + +impl TryFrom for NewUserMerchant { + type Error = error_stack::Report; + + fn try_from(value: UserMerchantCreateRequestWithToken) -> UserResult { + let merchant_id = if matches!(env::which(), env::Env::Production) { + MerchantId::new(value.1.company_name.clone())? + } else { + MerchantId::new(format!( + "merchant_{}", + common_utils::date_time::now_unix_timestamp() + ))? 
+ }; + Ok(Self { + merchant_id, + company_name: Some(UserCompanyName::new(value.1.company_name.clone())?), + new_organization: NewUserOrganization::from(value), + }) + } +} + #[derive(Clone)] pub struct NewUser { user_id: String, @@ -426,6 +515,44 @@ impl TryFrom for NewUser { } } +impl TryFrom for NewUser { + type Error = error_stack::Report; + + fn try_from(value: user_api::CreateInternalUserRequest) -> UserResult { + let user_id = uuid::Uuid::new_v4().to_string(); + let email = value.email.clone().try_into()?; + let name = UserName::new(value.name.clone())?; + let password = UserPassword::new(value.password.clone())?; + let new_merchant = NewUserMerchant::try_from(value)?; + + Ok(Self { + user_id, + name, + email, + password, + new_merchant, + }) + } +} + +impl TryFrom for NewUser { + type Error = error_stack::Report; + + fn try_from(value: UserMerchantCreateRequestWithToken) -> Result { + let user = value.0.clone(); + let new_merchant = NewUserMerchant::try_from(value)?; + + Ok(Self { + user_id: user.0.user_id, + name: UserName::new(user.0.name)?, + email: user.0.email.clone().try_into()?, + password: UserPassword::new(user.0.password)?, + new_merchant, + }) + } +} + +#[derive(Clone)] pub struct UserFromStorage(pub storage_user::User); impl From for UserFromStorage { @@ -473,6 +600,23 @@ impl UserFromStorage { .await } + pub async fn get_jwt_auth_token_with_custom_merchant_id( + &self, + state: AppState, + merchant_id: String, + org_id: String, + ) -> UserResult { + let role_id = self.get_role_from_db(state.clone()).await?.role_id; + AuthToken::new_token( + self.0.user_id.clone(), + merchant_id, + role_id, + &state.conf, + org_id, + ) + .await + } + pub async fn get_role_from_db(&self, state: AppState) -> UserResult { state .store @@ -481,3 +625,76 @@ impl UserFromStorage { .change_context(UserErrors::InternalServerError) } } + +impl TryFrom for user_role_api::ModuleInfo { + type Error = (); + fn try_from(value: info::ModuleInfo) -> Result { + let mut permissions = Vec::with_capacity(value.permissions.len()); + for permission in value.permissions { + let permission = permission.try_into()?; + permissions.push(permission); + } + Ok(Self { + module: value.module.into(), + description: value.description, + permissions, + }) + } +} + +impl From for user_role_api::PermissionModule { + fn from(value: info::PermissionModule) -> Self { + match value { + info::PermissionModule::Payments => Self::Payments, + info::PermissionModule::Refunds => Self::Refunds, + info::PermissionModule::MerchantAccount => Self::MerchantAccount, + info::PermissionModule::Forex => Self::Forex, + info::PermissionModule::Connectors => Self::Connectors, + info::PermissionModule::Routing => Self::Routing, + info::PermissionModule::Analytics => Self::Analytics, + info::PermissionModule::Mandates => Self::Mandates, + info::PermissionModule::Disputes => Self::Disputes, + info::PermissionModule::Files => Self::Files, + info::PermissionModule::ThreeDsDecisionManager => Self::ThreeDsDecisionManager, + info::PermissionModule::SurchargeDecisionManager => Self::SurchargeDecisionManager, + } + } +} + +impl TryFrom for user_role_api::PermissionInfo { + type Error = (); + fn try_from(value: info::PermissionInfo) -> Result { + let enum_name = (&value.enum_name).try_into()?; + Ok(Self { + enum_name, + description: value.description, + }) + } +} + +pub struct UserAndRoleJoined(pub storage_user::User, pub UserRole); + +impl TryFrom for user_api::UserDetails { + type Error = (); + fn try_from(user_and_role: UserAndRoleJoined) -> Result { 
+ let status = match user_and_role.1.status { + UserStatus::Active => user_role_api::UserStatus::Active, + UserStatus::InvitationSent => user_role_api::UserStatus::InvitationSent, + }; + + let role_id = user_and_role.1.role_id; + let role_name = predefined_permissions::get_role_name_from_id(role_id.as_str()) + .ok_or(())? + .to_string(); + + Ok(Self { + user_id: user_and_role.0.user_id, + email: user_and_role.0.email, + name: user_and_role.0.name, + role_id, + status, + role_name, + last_modified_at: user_and_role.1.last_modified_at, + }) + } +} diff --git a/crates/router/src/types/domain/user/dashboard_metadata.rs b/crates/router/src/types/domain/user/dashboard_metadata.rs new file mode 100644 index 000000000000..e65379346ac9 --- /dev/null +++ b/crates/router/src/types/domain/user/dashboard_metadata.rs @@ -0,0 +1,56 @@ +use api_models::user::dashboard_metadata as api; +use diesel_models::enums::DashboardMetadata as DBEnum; +use masking::Secret; +use time::PrimitiveDateTime; + +pub enum MetaData { + ProductionAgreement(ProductionAgreementValue), + SetupProcessor(api::SetupProcessor), + ConfigureEndpoint(bool), + SetupComplete(bool), + FirstProcessorConnected(api::ProcessorConnected), + SecondProcessorConnected(api::ProcessorConnected), + ConfiguredRouting(api::ConfiguredRouting), + TestPayment(api::TestPayment), + IntegrationMethod(api::IntegrationMethod), + IntegrationCompleted(bool), + StripeConnected(api::ProcessorConnected), + PaypalConnected(api::ProcessorConnected), + SPRoutingConfigured(api::ConfiguredRouting), + SPTestPayment(bool), + DownloadWoocom(bool), + ConfigureWoocom(bool), + SetupWoocomWebhook(bool), + IsMultipleConfiguration(bool), +} + +impl From<&MetaData> for DBEnum { + fn from(value: &MetaData) -> Self { + match value { + MetaData::ProductionAgreement(_) => Self::ProductionAgreement, + MetaData::SetupProcessor(_) => Self::SetupProcessor, + MetaData::ConfigureEndpoint(_) => Self::ConfigureEndpoint, + MetaData::SetupComplete(_) => Self::SetupComplete, + MetaData::FirstProcessorConnected(_) => Self::FirstProcessorConnected, + MetaData::SecondProcessorConnected(_) => Self::SecondProcessorConnected, + MetaData::ConfiguredRouting(_) => Self::ConfiguredRouting, + MetaData::TestPayment(_) => Self::TestPayment, + MetaData::IntegrationMethod(_) => Self::IntegrationMethod, + MetaData::IntegrationCompleted(_) => Self::IntegrationCompleted, + MetaData::StripeConnected(_) => Self::StripeConnected, + MetaData::PaypalConnected(_) => Self::PaypalConnected, + MetaData::SPRoutingConfigured(_) => Self::SpRoutingConfigured, + MetaData::SPTestPayment(_) => Self::SpTestPayment, + MetaData::DownloadWoocom(_) => Self::DownloadWoocom, + MetaData::ConfigureWoocom(_) => Self::ConfigureWoocom, + MetaData::SetupWoocomWebhook(_) => Self::SetupWoocomWebhook, + MetaData::IsMultipleConfiguration(_) => Self::IsMultipleConfiguration, + } + } +} +#[derive(Debug, serde::Serialize)] +pub struct ProductionAgreementValue { + pub version: String, + pub ip_address: Secret, + pub timestamp: PrimitiveDateTime, +} diff --git a/crates/router/src/types/storage.rs b/crates/router/src/types/storage.rs index e3e19323357b..a83a405f3554 100644 --- a/crates/router/src/types/storage.rs +++ b/crates/router/src/types/storage.rs @@ -5,6 +5,7 @@ pub mod capture; pub mod cards_info; pub mod configs; pub mod customers; +pub mod dashboard_metadata; pub mod dispute; pub mod enums; pub mod ephemeral_key; @@ -42,11 +43,11 @@ pub use data_models::payments::{ }; pub use self::{ - address::*, api_keys::*, capture::*, cards_info::*, 
configs::*, customers::*, dispute::*, - ephemeral_key::*, events::*, file::*, gsm::*, locker_mock_up::*, mandate::*, - merchant_account::*, merchant_connector_account::*, merchant_key_store::*, payment_link::*, - payment_method::*, payout_attempt::*, payouts::*, process_tracker::*, refund::*, - reverse_lookup::*, routing_algorithm::*, user::*, user_role::*, + address::*, api_keys::*, capture::*, cards_info::*, configs::*, customers::*, + dashboard_metadata::*, dispute::*, ephemeral_key::*, events::*, file::*, gsm::*, + locker_mock_up::*, mandate::*, merchant_account::*, merchant_connector_account::*, + merchant_key_store::*, payment_link::*, payment_method::*, payout_attempt::*, payouts::*, + process_tracker::*, refund::*, reverse_lookup::*, routing_algorithm::*, user::*, user_role::*, }; use crate::types::api::routing; diff --git a/crates/router/src/types/storage/dashboard_metadata.rs b/crates/router/src/types/storage/dashboard_metadata.rs new file mode 100644 index 000000000000..d804dfb1ff8b --- /dev/null +++ b/crates/router/src/types/storage/dashboard_metadata.rs @@ -0,0 +1 @@ +pub use diesel_models::user::dashboard_metadata::*; diff --git a/crates/router/src/types/storage/payment_attempt.rs b/crates/router/src/types/storage/payment_attempt.rs index f94d06997ca9..13b9f3dd5d5c 100644 --- a/crates/router/src/types/storage/payment_attempt.rs +++ b/crates/router/src/types/storage/payment_attempt.rs @@ -7,7 +7,6 @@ use error_stack::ResultExt; use crate::{ core::errors, errors::RouterResult, types::transformers::ForeignFrom, utils::OptionExt, }; - pub trait PaymentAttemptExt { fn make_new_capture( &self, @@ -134,9 +133,7 @@ mod tests { use crate::configs::settings::Settings; let conf = Settings::new().expect("invalid settings"); let tx: oneshot::Sender<()> = oneshot::channel().0; - let api_client = Box::new(services::MockApiClient); - let state = routes::AppState::with_storage(conf, StorageImpl::PostgresqlTest, tx, api_client).await; @@ -187,7 +184,6 @@ mod tests { let tx: oneshot::Sender<()> = oneshot::channel().0; let api_client = Box::new(services::MockApiClient); - let state = routes::AppState::with_storage(conf, StorageImpl::PostgresqlTest, tx, api_client).await; let current_time = common_utils::date_time::now(); diff --git a/crates/router/src/utils.rs b/crates/router/src/utils.rs index c936ee858c17..f1590342e17c 100644 --- a/crates/router/src/utils.rs +++ b/crates/router/src/utils.rs @@ -6,6 +6,10 @@ pub mod ext_traits; pub mod storage_partitioning; #[cfg(feature = "olap")] pub mod user; +#[cfg(feature = "olap")] +pub mod user_role; +#[cfg(feature = "olap")] +pub mod verify_connector; use std::fmt::Debug; diff --git a/crates/router/src/utils/user.rs b/crates/router/src/utils/user.rs index c72e4b9feb3c..696aa4090044 100644 --- a/crates/router/src/utils/user.rs +++ b/crates/router/src/utils/user.rs @@ -1 +1,70 @@ +use diesel_models::enums::UserStatus; +use error_stack::ResultExt; + +use crate::{ + core::errors::{UserErrors, UserResult}, + routes::AppState, + services::authentication::UserFromToken, + types::domain::MerchantAccount, +}; + +pub mod dashboard_metadata; pub mod password; +#[cfg(feature = "dummy_connector")] +pub mod sample_data; + +impl UserFromToken { + pub async fn get_merchant_account(&self, state: AppState) -> UserResult { + let key_store = state + .store + .get_merchant_key_store_by_merchant_id( + &self.merchant_id, + &state.store.get_master_key().to_vec().into(), + ) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + 
e.change_context(UserErrors::MerchantIdNotFound) + } else { + e.change_context(UserErrors::InternalServerError) + } + })?; + let merchant_account = state + .store + .find_merchant_account_by_merchant_id(&self.merchant_id, &key_store) + .await + .map_err(|e| { + if e.current_context().is_db_not_found() { + e.change_context(UserErrors::MerchantIdNotFound) + } else { + e.change_context(UserErrors::InternalServerError) + } + })?; + Ok(merchant_account) + } + + pub async fn get_user(&self, state: AppState) -> UserResult { + let user = state + .store + .find_user_by_id(&self.user_id) + .await + .change_context(UserErrors::InternalServerError)?; + Ok(user) + } +} + +pub async fn get_merchant_ids_for_user(state: AppState, user_id: &str) -> UserResult> { + Ok(state + .store + .list_user_roles_by_user_id(user_id) + .await + .change_context(UserErrors::InternalServerError)? + .into_iter() + .filter_map(|ele| { + if ele.status == UserStatus::Active { + return Some(ele.merchant_id); + } + None + }) + .collect()) +} diff --git a/crates/router/src/utils/user/dashboard_metadata.rs b/crates/router/src/utils/user/dashboard_metadata.rs new file mode 100644 index 000000000000..5f354e613f95 --- /dev/null +++ b/crates/router/src/utils/user/dashboard_metadata.rs @@ -0,0 +1,162 @@ +use std::{net::IpAddr, str::FromStr}; + +use actix_web::http::header::HeaderMap; +use api_models::user::dashboard_metadata::{ + GetMetaDataRequest, GetMultipleMetaDataPayload, SetMetaDataRequest, +}; +use diesel_models::{ + enums::DashboardMetadata as DBEnum, + user::dashboard_metadata::{DashboardMetadata, DashboardMetadataNew}, +}; +use error_stack::{IntoReport, ResultExt}; +use masking::Secret; + +use crate::{ + core::errors::{UserErrors, UserResult}, + headers, AppState, +}; + +pub async fn insert_merchant_scoped_metadata_to_db( + state: &AppState, + user_id: String, + merchant_id: String, + org_id: String, + metadata_key: DBEnum, + metadata_value: impl serde::Serialize, +) -> UserResult { + let now = common_utils::date_time::now(); + let data_value = serde_json::to_value(metadata_value) + .into_report() + .change_context(UserErrors::InternalServerError) + .attach_printable("Error Converting Struct To Serde Value")?; + state + .store + .insert_metadata(DashboardMetadataNew { + user_id: None, + merchant_id, + org_id, + data_key: metadata_key, + data_value, + created_by: user_id.clone(), + created_at: now, + last_modified_by: user_id, + last_modified_at: now, + }) + .await + .map_err(|e| { + if e.current_context().is_db_unique_violation() { + return e.change_context(UserErrors::MetadataAlreadySet); + } + e.change_context(UserErrors::InternalServerError) + }) +} + +pub async fn get_merchant_scoped_metadata_from_db( + state: &AppState, + merchant_id: String, + org_id: String, + metadata_keys: Vec, +) -> UserResult> { + match state + .store + .find_merchant_scoped_dashboard_metadata(&merchant_id, &org_id, metadata_keys) + .await + { + Ok(data) => Ok(data), + Err(e) => { + if e.current_context().is_db_not_found() { + return Ok(Vec::with_capacity(0)); + } + Err(e + .change_context(UserErrors::InternalServerError) + .attach_printable("DB Error Fetching DashboardMetaData")) + } + } +} + +pub fn deserialize_to_response(data: Option<&DashboardMetadata>) -> UserResult> +where + T: serde::de::DeserializeOwned, +{ + data.map(|metadata| serde_json::from_value(metadata.data_value.clone())) + .transpose() + .map_err(|_| UserErrors::InternalServerError.into()) + .attach_printable("Error Serializing Metadata from DB") +} + +pub fn 
separate_metadata_type_based_on_scope( + metadata_keys: Vec, +) -> (Vec, Vec) { + let (mut merchant_scoped, user_scoped) = ( + Vec::with_capacity(metadata_keys.len()), + Vec::with_capacity(metadata_keys.len()), + ); + for key in metadata_keys { + match key { + DBEnum::ProductionAgreement + | DBEnum::SetupProcessor + | DBEnum::ConfigureEndpoint + | DBEnum::SetupComplete + | DBEnum::FirstProcessorConnected + | DBEnum::SecondProcessorConnected + | DBEnum::ConfiguredRouting + | DBEnum::TestPayment + | DBEnum::IntegrationMethod + | DBEnum::IntegrationCompleted + | DBEnum::StripeConnected + | DBEnum::PaypalConnected + | DBEnum::SpRoutingConfigured + | DBEnum::SpTestPayment + | DBEnum::DownloadWoocom + | DBEnum::ConfigureWoocom + | DBEnum::SetupWoocomWebhook + | DBEnum::IsMultipleConfiguration => merchant_scoped.push(key), + } + } + (merchant_scoped, user_scoped) +} + +pub fn is_backfill_required(metadata_key: &DBEnum) -> bool { + matches!( + metadata_key, + DBEnum::StripeConnected | DBEnum::PaypalConnected + ) +} + +pub fn set_ip_address_if_required( + request: &mut SetMetaDataRequest, + headers: &HeaderMap, +) -> UserResult<()> { + if let SetMetaDataRequest::ProductionAgreement(req) = request { + let ip_address_from_request: Secret = headers + .get(headers::X_FORWARDED_FOR) + .ok_or(UserErrors::IpAddressParsingFailed.into()) + .attach_printable("X-Forwarded-For header not found")? + .to_str() + .map_err(|_| UserErrors::IpAddressParsingFailed.into()) + .attach_printable("Error converting Header Value to Str")? + .split(',') + .next() + .and_then(|ip| { + let ip_addr: Result = ip.parse(); + ip_addr.ok() + }) + .ok_or(UserErrors::IpAddressParsingFailed.into()) + .attach_printable("Error Parsing header value to ip")? + .to_string() + .into(); + req.ip_address = Some(ip_address_from_request) + } + Ok(()) +} + +pub fn parse_string_to_enums(query: String) -> UserResult { + Ok(GetMultipleMetaDataPayload { + results: query + .split(',') + .map(GetMetaDataRequest::from_str) + .collect::, _>>() + .map_err(|_| UserErrors::InvalidMetadataRequest.into()) + .attach_printable("Error Parsing to DashboardMetadata enums")?, + }) +} diff --git a/crates/router/src/utils/user/sample_data.rs b/crates/router/src/utils/user/sample_data.rs new file mode 100644 index 000000000000..7a9cf6d2b7db --- /dev/null +++ b/crates/router/src/utils/user/sample_data.rs @@ -0,0 +1,291 @@ +use api_models::{ + enums::Connector::{DummyConnector4, DummyConnector7}, + user::sample_data::SampleDataRequest, +}; +use data_models::payments::payment_intent::PaymentIntentNew; +use diesel_models::{user::sample_data::PaymentAttemptBatchNew, RefundNew}; +use error_stack::{IntoReport, ResultExt}; +use rand::{prelude::SliceRandom, thread_rng, Rng}; +use time::OffsetDateTime; + +use crate::{ + consts, + core::errors::sample_data::{SampleDataError, SampleDataResult}, + AppState, +}; + +#[allow(clippy::type_complexity)] +pub async fn generate_sample_data( + state: &AppState, + req: SampleDataRequest, + merchant_id: &str, +) -> SampleDataResult)>> { + let merchant_id = merchant_id.to_string(); + let sample_data_size: usize = req.record.unwrap_or(100); + + if !(10..=100).contains(&sample_data_size) { + return Err(SampleDataError::InvalidRange.into()); + } + + let key_store = state + .store + .get_merchant_key_store_by_merchant_id( + merchant_id.as_str(), + &state.store.get_master_key().to_vec().into(), + ) + .await + .change_context(SampleDataError::DatabaseError)?; + + let merchant_from_db = state + .store + 
diff --git a/crates/router/src/utils/user/sample_data.rs b/crates/router/src/utils/user/sample_data.rs new file mode 100644 index 000000000000..7a9cf6d2b7db --- /dev/null +++ b/crates/router/src/utils/user/sample_data.rs @@ -0,0 +1,291 @@ +use api_models::{ + enums::Connector::{DummyConnector4, DummyConnector7}, + user::sample_data::SampleDataRequest, +}; +use data_models::payments::payment_intent::PaymentIntentNew; +use diesel_models::{user::sample_data::PaymentAttemptBatchNew, RefundNew}; +use error_stack::{IntoReport, ResultExt}; +use rand::{prelude::SliceRandom, thread_rng, Rng}; +use time::OffsetDateTime; + +use crate::{ + consts, + core::errors::sample_data::{SampleDataError, SampleDataResult}, + AppState, +}; + +#[allow(clippy::type_complexity)] +pub async fn generate_sample_data( + state: &AppState, + req: SampleDataRequest, + merchant_id: &str, +) -> SampleDataResult<Vec<(PaymentIntentNew, PaymentAttemptBatchNew, Option<RefundNew>)>> { + let merchant_id = merchant_id.to_string(); + let sample_data_size: usize = req.record.unwrap_or(100); + + if !(10..=100).contains(&sample_data_size) { + return Err(SampleDataError::InvalidRange.into()); + } + + let key_store = state + .store + .get_merchant_key_store_by_merchant_id( + merchant_id.as_str(), + &state.store.get_master_key().to_vec().into(), + ) + .await + .change_context(SampleDataError::DatabaseError)?; + + let merchant_from_db = state + .store + .find_merchant_account_by_merchant_id(merchant_id.as_str(), &key_store) + .await + .change_context(SampleDataError::DataDoesNotExist)?; + + let merchant_parsed_details: Vec<api_models::admin::PrimaryBusinessDetails> = + serde_json::from_value(merchant_from_db.primary_business_details.clone()) + .into_report() + .change_context(SampleDataError::InternalServerError) + .attach_printable("Error while parsing primary business details")?; + + let business_country_default = merchant_parsed_details.get(0).map(|x| x.country); + + let business_label_default = merchant_parsed_details.get(0).map(|x| x.business.clone()); + + let profile_id = crate::core::utils::get_profile_id_from_business_details( + business_country_default, + business_label_default.as_ref(), + &merchant_from_db, + req.profile_id.as_ref(), + &*state.store, + false, + ) + .await + .change_context(SampleDataError::InternalServerError) + .attach_printable("Failed to get business profile")?; + + // 10 percent of payments should be failed + #[allow(clippy::as_conversions)] + let failure_attempts = usize::try_from((sample_data_size as f32 / 10.0).round() as i64) + .into_report() + .change_context(SampleDataError::InvalidParameters)?; + + let failure_after_attempts = sample_data_size / failure_attempts; + + // 20 percent of payments get refunds + #[allow(clippy::as_conversions)] + let number_of_refunds = usize::try_from((sample_data_size as f32 / 5.0).round() as i64) + .into_report() + .change_context(SampleDataError::InvalidParameters)?; + + let mut refunds_count = 0; + + let mut random_array: Vec<usize> = (1..=sample_data_size).collect(); + + // Shuffle the array + let mut rng = thread_rng(); + random_array.shuffle(&mut rng); + + let mut res: Vec<(PaymentIntentNew, PaymentAttemptBatchNew, Option<RefundNew>)> = Vec::new(); + let start_time = req + .start_time + .unwrap_or(common_utils::date_time::now() - time::Duration::days(7)) + .assume_utc() + .unix_timestamp(); + let end_time = req + .end_time + .unwrap_or_else(common_utils::date_time::now) + .assume_utc() + .unix_timestamp(); + + let current_time = common_utils::date_time::now().assume_utc().unix_timestamp(); + + let min_amount = req.min_amount.unwrap_or(100); + let max_amount = req.max_amount.unwrap_or(min_amount + 100); + + if min_amount > max_amount + || start_time > end_time + || start_time > current_time + || end_time > current_time + { + return Err(SampleDataError::InvalidParameters.into()); + }; + + let currency_vec = req.currency.unwrap_or(vec![common_enums::Currency::USD]); + let currency_vec_len = currency_vec.len(); + + let connector_vec = req + .connector + .unwrap_or(vec![DummyConnector4, DummyConnector7]); + let connector_vec_len = connector_vec.len(); + + let auth_type = req.auth_type.unwrap_or(vec![ + common_enums::AuthenticationType::ThreeDs, + common_enums::AuthenticationType::NoThreeDs, + ]); + let auth_type_len = auth_type.len(); + + if currency_vec_len == 0 || connector_vec_len == 0 || auth_type_len == 0 { + return Err(SampleDataError::InvalidParameters.into()); + }
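The failure and refund ratios above reduce to simple integer arithmetic; for the default `sample_data_size` of 100 this yields 10 failed attempts (every 10th shuffled index) and 20 refunds. A small standalone check of that arithmetic:

```rust
fn main() {
    let sample_data_size: usize = 100;
    // ~10% of payments fail: 100 / 10.0 rounds to 10.
    let failure_attempts = ((sample_data_size as f32) / 10.0).round() as usize;
    // Every Nth shuffled index is marked failed: 100 / 10 = 10.
    let failure_after_attempts = sample_data_size / failure_attempts;
    // ~20% of payments get a refund: 100 / 5.0 rounds to 20.
    let number_of_refunds = ((sample_data_size as f32) / 5.0).round() as usize;
    assert_eq!(
        (failure_attempts, failure_after_attempts, number_of_refunds),
        (10, 10, 20)
    );
    // Indices divisible by `failure_after_attempts` mark failed payments.
    let failed = (1..=sample_data_size)
        .filter(|n| n % failure_after_attempts == 0)
        .count();
    assert_eq!(failed, 10);
}
```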
+ for num in 1..=sample_data_size { + let payment_id = common_utils::generate_id_with_default_len("test"); + let attempt_id = crate::utils::get_payment_attempt_id(&payment_id, 1); + let client_secret = common_utils::generate_id( + consts::ID_LENGTH, + format!("{}_secret", payment_id.clone()).as_str(), + ); + let amount = thread_rng().gen_range(min_amount..=max_amount); + + let created_at @ modified_at @ last_synced = + OffsetDateTime::from_unix_timestamp(thread_rng().gen_range(start_time..=end_time)) + .map(common_utils::date_time::convert_to_pdt) + .unwrap_or( + req.start_time.unwrap_or_else(|| { + common_utils::date_time::now() - time::Duration::days(7) + }), + ); + + // After a fixed number of payments, the sample data will include a failed attempt + let is_failed_payment = + (random_array.get(num - 1).unwrap_or(&0) % failure_after_attempts) == 0; + + let payment_intent = PaymentIntentNew { + payment_id: payment_id.clone(), + merchant_id: merchant_id.clone(), + status: match is_failed_payment { + true => common_enums::IntentStatus::Failed, + _ => common_enums::IntentStatus::Succeeded, + }, + amount: amount * 100, + currency: Some( + *currency_vec + .get((num - 1) % currency_vec_len) + .unwrap_or(&common_enums::Currency::USD), + ), + description: Some("This is a sample payment".to_string()), + created_at: Some(created_at), + modified_at: Some(modified_at), + last_synced: Some(last_synced), + client_secret: Some(client_secret), + business_country: business_country_default, + business_label: business_label_default.clone(), + active_attempt: data_models::RemoteStorageObject::ForeignID(attempt_id.clone()), + attempt_count: 1, + customer_id: Some("hs-dashboard-user".to_string()), + amount_captured: Some(amount * 100), + profile_id: Some(profile_id.clone()), + return_url: Default::default(), + metadata: Default::default(), + connector_id: Default::default(), + shipping_address_id: Default::default(), + billing_address_id: Default::default(), + statement_descriptor_name: Default::default(), + statement_descriptor_suffix: Default::default(), + setup_future_usage: Default::default(), + off_session: Default::default(), + order_details: Default::default(), + allowed_payment_method_types: Default::default(), + connector_metadata: Default::default(), + feature_metadata: Default::default(), + merchant_decision: Default::default(), + payment_link_id: Default::default(), + payment_confirm_source: Default::default(), + updated_by: merchant_from_db.storage_scheme.to_string(), + surcharge_applicable: Default::default(), + request_incremental_authorization: Default::default(), + incremental_authorization_allowed: Default::default(), + }; + let payment_attempt = PaymentAttemptBatchNew { + attempt_id: attempt_id.clone(), + payment_id: payment_id.clone(), + connector_transaction_id: Some(attempt_id.clone()), + merchant_id: merchant_id.clone(), + status: match is_failed_payment { + true => common_enums::AttemptStatus::Failure, + _ => common_enums::AttemptStatus::Charged, + }, + amount: amount * 100, + currency: payment_intent.currency, + connector: Some( + (*connector_vec + .get((num - 1) % connector_vec_len) + .unwrap_or(&DummyConnector4)) + .to_string(), + ), + payment_method: Some(common_enums::PaymentMethod::Card), + payment_method_type: Some(get_payment_method_type(thread_rng().gen_range(1..=2))), + authentication_type: Some( + *auth_type + .get((num - 1) % auth_type_len) + .unwrap_or(&common_enums::AuthenticationType::NoThreeDs), + ), + error_message: match is_failed_payment { + true => Some("This is a test payment which has a failed status".to_string()), + _ => None, + }, + error_code: match is_failed_payment { + true => Some("HS001".to_string()), + _ => None, + }, + confirm: true, + created_at: Some(created_at), + modified_at: Some(modified_at), + last_synced: Some(last_synced), + amount_to_capture: Some(amount * 100), + connector_response_reference_id: Some(attempt_id.clone()), + updated_by: merchant_from_db.storage_scheme.to_string(), + + ..Default::default() + };
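The `amount * 100` in both rows reads as a major-to-minor currency unit conversion (the sampled amount looks like whole units, while the intent and attempt columns appear to store minor units such as cents). A one-line sketch of that assumption:

```rust
// Assumption: the sampled `amount` is in major units and the stored
// columns hold minor units, hence the `* 100` in the rows above.
fn to_minor_units(amount: i64) -> i64 {
    amount * 100
}

fn main() {
    assert_eq!(to_minor_units(65), 6500); // e.g. $65 becomes 6500 cents
}
```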
+ let refund = if refunds_count < number_of_refunds && !is_failed_payment { + refunds_count += 1; + Some(RefundNew { + refund_id: common_utils::generate_id_with_default_len("test"), + internal_reference_id: common_utils::generate_id_with_default_len("test"), + external_reference_id: None, + payment_id: payment_id.clone(), + attempt_id: attempt_id.clone(), + merchant_id: merchant_id.clone(), + connector_transaction_id: attempt_id.clone(), + connector_refund_id: None, + description: Some("This is a sample refund".to_string()), + created_at: Some(created_at), + modified_at: Some(modified_at), + refund_reason: Some("Sample Refund".to_string()), + connector: payment_attempt + .connector + .clone() + .unwrap_or(DummyConnector4.to_string()), + currency: *currency_vec + .get((num - 1) % currency_vec_len) + .unwrap_or(&common_enums::Currency::USD), + total_amount: amount * 100, + refund_amount: amount * 100, + refund_status: common_enums::RefundStatus::Success, + sent_to_gateway: true, + refund_type: diesel_models::enums::RefundType::InstantRefund, + metadata: None, + refund_arn: None, + profile_id: payment_intent.profile_id.clone(), + updated_by: merchant_from_db.storage_scheme.to_string(), + merchant_connector_id: payment_attempt.merchant_connector_id.clone(), + }) + } else { + None + }; + + res.push((payment_intent, payment_attempt, refund)); + } + Ok(res) +} + +fn get_payment_method_type(num: u8) -> common_enums::PaymentMethodType { + let rem: u8 = (num) % 2; + match rem { + 0 => common_enums::PaymentMethodType::Debit, + _ => common_enums::PaymentMethodType::Credit, + } +} diff --git a/crates/router/src/utils/user_role.rs b/crates/router/src/utils/user_role.rs new file mode 100644 index 000000000000..0026984fdb9a --- /dev/null +++ b/crates/router/src/utils/user_role.rs @@ -0,0 +1,93 @@ +use api_models::user_role as user_role_api; +use diesel_models::enums::UserStatus; +use error_stack::ResultExt; +use router_env::logger; + +use crate::{ + consts, + core::errors::{UserErrors, UserResult}, + routes::AppState, + services::authorization::{ + permissions::Permission, + predefined_permissions::{self, RoleInfo}, + }, +}; + +pub fn is_internal_role(role_id: &str) -> bool { + role_id == consts::user_role::ROLE_ID_INTERNAL_ADMIN + || role_id == consts::user_role::ROLE_ID_INTERNAL_VIEW_ONLY_USER +} + +pub async fn get_merchant_ids_for_user(state: AppState, user_id: &str) -> UserResult<Vec<String>> { + Ok(state + .store + .list_user_roles_by_user_id(user_id) + .await + .change_context(UserErrors::InternalServerError)?
+ .into_iter() + .filter_map(|ele| { + if ele.status == UserStatus::Active { + return Some(ele.merchant_id); + } + None + }) + .collect()) +} + +pub fn validate_role_id(role_id: &str) -> UserResult<()> { + if predefined_permissions::is_role_invitable(role_id) { + return Ok(()); + } + Err(UserErrors::InvalidRoleId.into()) +} + +pub fn get_role_name_and_permission_response( + role_info: &RoleInfo, +) -> Option<(Vec<user_role_api::Permission>, &'static str)> { + role_info + .get_permissions() + .iter() + .map(TryInto::try_into) + .collect::<Result<Vec<_>, _>>() + .ok() + .zip(role_info.get_name()) +} + +impl TryFrom<&Permission> for user_role_api::Permission { + type Error = (); + fn try_from(value: &Permission) -> Result<Self, Self::Error> { + match value { + Permission::PaymentRead => Ok(Self::PaymentRead), + Permission::PaymentWrite => Ok(Self::PaymentWrite), + Permission::RefundRead => Ok(Self::RefundRead), + Permission::RefundWrite => Ok(Self::RefundWrite), + Permission::ApiKeyRead => Ok(Self::ApiKeyRead), + Permission::ApiKeyWrite => Ok(Self::ApiKeyWrite), + Permission::MerchantAccountRead => Ok(Self::MerchantAccountRead), + Permission::MerchantAccountWrite => Ok(Self::MerchantAccountWrite), + Permission::MerchantConnectorAccountRead => Ok(Self::MerchantConnectorAccountRead), + Permission::MerchantConnectorAccountWrite => Ok(Self::MerchantConnectorAccountWrite), + Permission::ForexRead => Ok(Self::ForexRead), + Permission::RoutingRead => Ok(Self::RoutingRead), + Permission::RoutingWrite => Ok(Self::RoutingWrite), + Permission::DisputeRead => Ok(Self::DisputeRead), + Permission::DisputeWrite => Ok(Self::DisputeWrite), + Permission::MandateRead => Ok(Self::MandateRead), + Permission::MandateWrite => Ok(Self::MandateWrite), + Permission::FileRead => Ok(Self::FileRead), + Permission::FileWrite => Ok(Self::FileWrite), + Permission::Analytics => Ok(Self::Analytics), + Permission::ThreeDsDecisionManagerWrite => Ok(Self::ThreeDsDecisionManagerWrite), + Permission::ThreeDsDecisionManagerRead => Ok(Self::ThreeDsDecisionManagerRead), + Permission::SurchargeDecisionManagerWrite => Ok(Self::SurchargeDecisionManagerWrite), + Permission::SurchargeDecisionManagerRead => Ok(Self::SurchargeDecisionManagerRead), + Permission::UsersRead => Ok(Self::UsersRead), + Permission::UsersWrite => Ok(Self::UsersWrite), + + Permission::MerchantAccountCreate => { + logger::error!("Invalid use of internal permission"); + Err(()) + } + } + } +}
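The `TryFrom` impl above deliberately fails for internal-only permissions, so that `collect::<Result<Vec<_>, _>>().ok()` in `get_role_name_and_permission_response` drops the whole role from the response rather than leaking an internal permission. A toy reduction of that pattern (the two small enums are illustrative, not the real permission sets):

```rust
#[derive(Debug)]
enum Internal {
    PaymentRead,
    MerchantAccountCreate, // internal-only, never exposed
}

#[derive(Debug, PartialEq)]
enum Api {
    PaymentRead,
}

impl TryFrom<&Internal> for Api {
    type Error = ();
    fn try_from(value: &Internal) -> Result<Self, Self::Error> {
        match value {
            Internal::PaymentRead => Ok(Self::PaymentRead),
            // Refuse to convert internal permissions for the API surface.
            Internal::MerchantAccountCreate => Err(()),
        }
    }
}

fn expose(perms: &[Internal]) -> Option<Vec<Api>> {
    // One failed conversion turns the whole collect into Err, and .ok()
    // maps that to None, hiding the role entirely.
    perms
        .iter()
        .map(TryInto::try_into)
        .collect::<Result<Vec<Api>, _>>()
        .ok()
}

fn main() {
    assert_eq!(expose(&[Internal::PaymentRead]), Some(vec![Api::PaymentRead]));
    assert_eq!(
        expose(&[Internal::PaymentRead, Internal::MerchantAccountCreate]),
        None
    );
}
```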
diff --git a/crates/router/src/utils/verify_connector.rs b/crates/router/src/utils/verify_connector.rs new file mode 100644 index 000000000000..6ad683d63ba1 --- /dev/null +++ b/crates/router/src/utils/verify_connector.rs @@ -0,0 +1,49 @@ +use api_models::enums::Connector; +use error_stack::{IntoReport, ResultExt}; + +use crate::{core::errors, types::api}; + +pub fn generate_card_from_details( + card_number: String, + card_exp_year: String, + card_exp_month: String, + card_cvv: String, +) -> errors::RouterResult<api::Card> { + Ok(api::Card { + card_number: card_number + .parse() + .into_report() + .change_context(errors::ApiErrorResponse::InternalServerError) + .attach_printable("Error while parsing card number")?, + card_issuer: None, + card_cvc: masking::Secret::new(card_cvv), + card_network: None, + card_exp_year: masking::Secret::new(card_exp_year), + card_exp_month: masking::Secret::new(card_exp_month), + card_holder_name: masking::Secret::new("HyperSwitch".to_string()), + nick_name: None, + card_type: None, + card_issuing_country: None, + bank_code: None, + }) +} + +pub fn get_test_card_details(connector_name: Connector) -> errors::RouterResult<Option<api::Card>> { + match connector_name { + Connector::Stripe => Some(generate_card_from_details( + "4242424242424242".to_string(), + "2025".to_string(), + "12".to_string(), + "100".to_string(), + )) + .transpose(), + Connector::Paypal => Some(generate_card_from_details( + "4111111111111111".to_string(), + "2025".to_string(), + "02".to_string(), + "123".to_string(), + )) + .transpose(), + _ => Ok(None), + } +}
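`get_test_card_details` wraps a fallible constructor in `Some(...)` and then calls `.transpose()`, turning `Option<Result<Card, E>>` into `Result<Option<Card>, E>` so the `_ => Ok(None)` arm lines up with the other match arms. A minimal sketch of that conversion using a standard-library parse in place of the card constructor:

```rust
fn parse_card_number(raw: &str) -> Result<u64, std::num::ParseIntError> {
    raw.parse()
}

fn main() {
    // Some(Ok(v)) -> Ok(Some(v)), mirroring the Stripe/Paypal arms above.
    let known: Result<Option<u64>, _> =
        Some(parse_card_number("4242424242424242")).transpose();
    assert_eq!(known.unwrap(), Some(4242424242424242));

    // The fallback arm returns Ok(None) directly for unsupported connectors.
    let unsupported: Result<Option<u64>, std::num::ParseIntError> = Ok(None);
    assert!(unsupported.unwrap().is_none());
}
```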
diff --git a/crates/router/src/workflows/payment_sync.rs b/crates/router/src/workflows/payment_sync.rs index f2760a00582d..43567ce27e23 100644 --- a/crates/router/src/workflows/payment_sync.rs +++ b/crates/router/src/workflows/payment_sync.rs @@ -124,7 +124,7 @@ impl ProcessTrackerWorkflow for PaymentsSyncWorkflow { .as_ref() .is_none() { - let payment_intent_update = data_models::payments::payment_intent::PaymentIntentUpdate::PGStatusUpdate { status: api_models::enums::IntentStatus::Failed,updated_by: merchant_account.storage_scheme.to_string() }; + let payment_intent_update = data_models::payments::payment_intent::PaymentIntentUpdate::PGStatusUpdate { status: api_models::enums::IntentStatus::Failed,updated_by: merchant_account.storage_scheme.to_string(), incremental_authorization_allowed: Some(false) }; let payment_attempt_update = data_models::payments::payment_attempt::PaymentAttemptUpdate::ErrorUpdate { connector: None, diff --git a/crates/router/tests/connectors/aci.rs b/crates/router/tests/connectors/aci.rs index c9ee3a34f2ef..7ddc504956fb 100644 --- a/crates/router/tests/connectors/aci.rs +++ b/crates/router/tests/connectors/aci.rs @@ -69,6 +69,7 @@ fn construct_payment_router_data() -> types::PaymentsAuthorizeRouterData { complete_authorize_url: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }, response: Err(types::ErrorResponse::default()), payment_method_id: None, @@ -160,6 +161,7 @@ fn construct_refund_router_data<F>() -> types::RefundsRouterData<F> { async fn payments_create_success() { let conf = Settings::new().unwrap(); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( conf, StorageImpl::PostgresqlTest, @@ -204,6 +206,7 @@ async fn payments_create_failure() { let conf = Settings::new().unwrap(); static CV: aci::Aci = aci::Aci; let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( conf, StorageImpl::PostgresqlTest, @@ -265,6 +268,7 @@ async fn refund_for_successful_payments() { merchant_connector_id: None, }; let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( conf, StorageImpl::PostgresqlTest, @@ -333,6 +337,7 @@ async fn refunds_create_failure() { merchant_connector_id: None, }; let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( conf, StorageImpl::PostgresqlTest, diff --git a/crates/router/tests/connectors/adyen.rs b/crates/router/tests/connectors/adyen.rs index 4b2cbcb7c4a9..714dc0d7d672 100644 --- a/crates/router/tests/connectors/adyen.rs +++ b/crates/router/tests/connectors/adyen.rs @@ -157,6 +157,7 @@ impl AdyenTest { complete_authorize_url: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }) } } diff --git a/crates/router/tests/connectors/bitpay.rs b/crates/router/tests/connectors/bitpay.rs index 755427140c4f..3c9f08bf1b69 100644 --- a/crates/router/tests/connectors/bitpay.rs +++ b/crates/router/tests/connectors/bitpay.rs @@ -92,6 +92,7 @@ fn payment_method_details() -> Option<types::PaymentsAuthorizeData> { capture_method: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }) } diff --git a/crates/router/tests/connectors/cashtocode.rs b/crates/router/tests/connectors/cashtocode.rs index 871677bb692a..a7c95936fbe8 100644 --- a/crates/router/tests/connectors/cashtocode.rs +++ b/crates/router/tests/connectors/cashtocode.rs @@ -67,6 +67,7 @@ impl CashtocodeTest { complete_authorize_url: None, customer_id: Some("John Doe".to_owned()), surcharge_details: None, + request_incremental_authorization: false, }) } diff --git a/crates/router/tests/connectors/coinbase.rs b/crates/router/tests/connectors/coinbase.rs index 512e03a5c94d..2ddb5464d4df 100644 --- a/crates/router/tests/connectors/coinbase.rs +++ b/crates/router/tests/connectors/coinbase.rs @@ -94,6 +94,7 @@ fn payment_method_details() -> Option<types::PaymentsAuthorizeData> { capture_method: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }) } diff --git a/crates/router/tests/connectors/cryptopay.rs b/crates/router/tests/connectors/cryptopay.rs index e9c43cee3af6..11e556215c35 100644 --- a/crates/router/tests/connectors/cryptopay.rs +++ b/crates/router/tests/connectors/cryptopay.rs @@ -92,6 +92,7 @@ fn payment_method_details() -> Option<types::PaymentsAuthorizeData> { capture_method: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }) } diff --git a/crates/router/tests/connectors/opennode.rs b/crates/router/tests/connectors/opennode.rs index 248bbb02e520..707192e01c3b 100644 --- a/crates/router/tests/connectors/opennode.rs +++ b/crates/router/tests/connectors/opennode.rs @@ -93,6 +93,7 @@ fn payment_method_details() -> Option<types::PaymentsAuthorizeData> { capture_method: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }) } diff --git a/crates/router/tests/connectors/utils.rs b/crates/router/tests/connectors/utils.rs index 67a0625968fb..823b3eae497d 100644 --- a/crates/router/tests/connectors/utils.rs +++ b/crates/router/tests/connectors/utils.rs @@ -96,6 +96,7 @@ pub trait ConnectorActions: Connector { payment_info, ); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -120,6 +121,7 @@ pub trait ConnectorActions: Connector { payment_info, ); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -148,6 +150,7 @@ pub trait ConnectorActions: Connector { payment_info, ); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -561,6 +564,7 @@ pub trait ConnectorActions: Connector { .get_connector_integration(); let mut request = self.get_payout_request(None, payout_type, payment_info); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -601,6 +605,7 @@ pub trait ConnectorActions: Connector { .get_connector_integration(); let mut request = self.get_payout_request(connector_payout_id, payout_type, payment_info); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -642,6 +647,7 @@ pub trait ConnectorActions: Connector { let mut request = self.get_payout_request(None, payout_type, payment_info); request.connector_customer = connector_customer; let tx: 
oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -683,6 +689,7 @@ pub trait ConnectorActions: Connector { let mut request = self.get_payout_request(Some(connector_payout_id), payout_type, payment_info); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -770,6 +777,7 @@ pub trait ConnectorActions: Connector { .get_connector_integration(); let mut request = self.get_payout_request(None, payout_type, payment_info); let tx = oneshot::channel().0; + let state = routes::AppState::with_storage( Settings::new().unwrap(), StorageImpl::PostgresqlTest, @@ -802,6 +810,7 @@ async fn call_connector< ) -> Result, Report> { let conf = Settings::new().unwrap(); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( conf, StorageImpl::PostgresqlTest, @@ -899,6 +908,7 @@ impl Default for PaymentAuthorizeType { webhook_url: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }; Self(data) } @@ -1034,6 +1044,7 @@ pub fn get_connector_metadata( connector_metadata, network_txn_id: _, connector_response_reference_id: _, + incremental_authorization_allowed: _, }) => connector_metadata, _ => None, } diff --git a/crates/router/tests/connectors/worldline.rs b/crates/router/tests/connectors/worldline.rs index 6163949c6c58..fd697f95b754 100644 --- a/crates/router/tests/connectors/worldline.rs +++ b/crates/router/tests/connectors/worldline.rs @@ -102,6 +102,7 @@ impl WorldlineTest { complete_authorize_url: None, customer_id: None, surcharge_details: None, + request_incremental_authorization: false, }) } } diff --git a/crates/router/tests/payments2.rs b/crates/router/tests/payments2.rs index 5d4ca844061f..42e5524a15d5 100644 --- a/crates/router/tests/payments2.rs +++ b/crates/router/tests/payments2.rs @@ -217,6 +217,7 @@ async fn payments_create_core_adyen_no_redirect() { use router::configs::settings::Settings; let conf = Settings::new().expect("invalid settings"); let tx: oneshot::Sender<()> = oneshot::channel().0; + let state = routes::AppState::with_storage( conf, StorageImpl::PostgresqlTest, diff --git a/crates/router/tests/utils.rs b/crates/router/tests/utils.rs index 6cddbc043662..339eca6fa0fb 100644 --- a/crates/router/tests/utils.rs +++ b/crates/router/tests/utils.rs @@ -48,6 +48,7 @@ pub async fn mk_service( conf.connectors.stripe.base_url = url; } let tx: oneshot::Sender<()> = oneshot::channel().0; + let app_state = AppState::with_storage( conf, router::db::StorageImpl::Mock, diff --git a/crates/router_env/src/lib.rs b/crates/router_env/src/lib.rs index e75606aa1531..3c7ba8b93df7 100644 --- a/crates/router_env/src/lib.rs +++ b/crates/router_env/src/lib.rs @@ -39,10 +39,19 @@ use crate::types::FlowMetric; #[derive(Debug, Display, Clone, PartialEq, Eq)] pub enum AnalyticsFlow { GetInfo, + GetPaymentMetrics, + GetRefundsMetrics, + GetSdkMetrics, GetPaymentFilters, GetRefundFilters, - GetRefundsMetrics, - GetPaymentMetrics, + GetSdkEventFilters, + GetApiEvents, + GetSdkEvents, + GeneratePaymentReport, + GenerateDisputeReport, + GenerateRefundReport, + GetApiEventMetrics, + GetApiEventFilters, } impl FlowMetric for AnalyticsFlow {} diff --git a/crates/router_env/src/logger/types.rs b/crates/router_env/src/logger/types.rs index 2a174f42eb63..f54a5a82baaf 100644 --- a/crates/router_env/src/logger/types.rs +++ 
b/crates/router_env/src/logger/types.rs @@ -259,6 +259,34 @@ pub enum Flow { DecisionManagerRetrieveConfig, /// Change password flow ChangePassword, + /// Set Dashboard Metadata flow + SetDashboardMetadata, + /// Get Multiple Dashboard Metadata flow + GetMutltipleDashboardMetadata, + /// Payment Connector Verify + VerifyPaymentConnector, + /// Internal user signup + InternalUserSignup, + /// Switch merchant + SwitchMerchant, + /// Get permission info + GetAuthorizationInfo, + /// List roles + ListRoles, + /// Get role + GetRole, + /// Update user role + UpdateUserRole, + /// Create merchant account for user in an org + UserMerchantAccountCreate, + /// Generate Sample Data + GenerateSampleData, + /// Delete Sample Data + DeleteSampleData, + /// List merchant accounts for user + UserMerchantAccountList, + /// Get users for merchant account + GetUserDetails, } /// diff --git a/crates/scheduler/Cargo.toml b/crates/scheduler/Cargo.toml index e0b68c709e8d..5e8674ab3814 100644 --- a/crates/scheduler/Cargo.toml +++ b/crates/scheduler/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [features] default = ["kv_store", "olap"] -olap = [] +olap = ["storage_impl/olap"] kv_store = [] [dependencies] diff --git a/crates/storage_impl/src/config.rs b/crates/storage_impl/src/config.rs index f53507831b11..fd95a6d315d6 100644 --- a/crates/storage_impl/src/config.rs +++ b/crates/storage_impl/src/config.rs @@ -1,6 +1,6 @@ use masking::Secret; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, serde::Deserialize)] pub struct Database { pub username: String, pub password: Secret<String>, @@ -9,7 +9,41 @@ pub struct Database { pub dbname: String, pub pool_size: u32, pub connection_timeout: u64, - pub queue_strategy: bb8::QueueStrategy, + pub queue_strategy: QueueStrategy, pub min_idle: Option<u32>, pub max_lifetime: Option<u64>, } + +#[derive(Debug, serde::Deserialize, Clone, Copy, Default)] +#[serde(rename_all = "PascalCase")] +pub enum QueueStrategy { + #[default] + Fifo, + Lifo, +} + +impl From<QueueStrategy> for bb8::QueueStrategy { + fn from(value: QueueStrategy) -> Self { + match value { + QueueStrategy::Fifo => Self::Fifo, + QueueStrategy::Lifo => Self::Lifo, + } + } +} + +impl Default for Database { + fn default() -> Self { + Self { + username: String::new(), + password: Secret::<String>::default(), + host: "localhost".into(), + port: 5432, + dbname: String::new(), + pool_size: 5, + connection_timeout: 10, + queue_strategy: QueueStrategy::default(), + min_idle: None, + max_lifetime: None, + } + } +} diff --git a/crates/storage_impl/src/database/store.rs b/crates/storage_impl/src/database/store.rs index c36575e37c97..75c34af14ac1 100644 --- a/crates/storage_impl/src/database/store.rs +++ b/crates/storage_impl/src/database/store.rs @@ -89,7 +89,7 @@ pub async fn diesel_make_pg_pool( let mut pool = bb8::Pool::builder() .max_size(database.pool_size) .min_idle(database.min_idle) - .queue_strategy(database.queue_strategy) + .queue_strategy(database.queue_strategy.into()) .connection_timeout(std::time::Duration::from_secs(database.connection_timeout)) .max_lifetime(database.max_lifetime.map(std::time::Duration::from_secs)); diff --git a/crates/storage_impl/src/mock_db.rs b/crates/storage_impl/src/mock_db.rs index 4cdf8e2456bb..e22d39ce70c8 100644 --- a/crates/storage_impl/src/mock_db.rs +++ b/crates/storage_impl/src/mock_db.rs @@ -43,6 +43,7 @@ pub struct MockDb { pub organizations: Arc>>, pub users: Arc>>, pub user_roles: Arc>>, + pub dashboard_metadata: Arc>>, } impl MockDb { @@ -78,6 +79,7 @@ impl MockDb { organizations: Default::default(), users: 
Default::default(), user_roles: Default::default(), + dashboard_metadata: Default::default(), }) } } diff --git a/crates/storage_impl/src/mock_db/payment_intent.rs b/crates/storage_impl/src/mock_db/payment_intent.rs index 08a4a2aabeaa..a3e82c1d1044 100644 --- a/crates/storage_impl/src/mock_db/payment_intent.rs +++ b/crates/storage_impl/src/mock_db/payment_intent.rs @@ -106,6 +106,8 @@ impl PaymentIntentInterface for MockDb { payment_confirm_source: new.payment_confirm_source, updated_by: storage_scheme.to_string(), surcharge_applicable: new.surcharge_applicable, + request_incremental_authorization: new.request_incremental_authorization, + incremental_authorization_allowed: new.incremental_authorization_allowed, }; payment_intents.push(payment_intent.clone()); Ok(payment_intent) diff --git a/crates/storage_impl/src/payments/payment_intent.rs b/crates/storage_impl/src/payments/payment_intent.rs index 5b859274766e..f989d22219d1 100644 --- a/crates/storage_impl/src/payments/payment_intent.rs +++ b/crates/storage_impl/src/payments/payment_intent.rs @@ -98,6 +98,8 @@ impl PaymentIntentInterface for KVRouterStore { payment_confirm_source: new.payment_confirm_source, updated_by: storage_scheme.to_string(), surcharge_applicable: new.surcharge_applicable, + request_incremental_authorization: new.request_incremental_authorization, + incremental_authorization_allowed: new.incremental_authorization_allowed, }; let redis_entry = kv::TypedSql { op: kv::DBOperation::Insert { @@ -759,6 +761,8 @@ impl DataModelExt for PaymentIntentNew { payment_confirm_source: self.payment_confirm_source, updated_by: self.updated_by, surcharge_applicable: self.surcharge_applicable, + request_incremental_authorization: self.request_incremental_authorization, + incremental_authorization_allowed: self.incremental_authorization_allowed, } } @@ -799,6 +803,8 @@ impl DataModelExt for PaymentIntentNew { payment_confirm_source: storage_model.payment_confirm_source, updated_by: storage_model.updated_by, surcharge_applicable: storage_model.surcharge_applicable, + request_incremental_authorization: storage_model.request_incremental_authorization, + incremental_authorization_allowed: storage_model.incremental_authorization_allowed, } } } @@ -844,6 +850,8 @@ impl DataModelExt for PaymentIntent { payment_confirm_source: self.payment_confirm_source, updated_by: self.updated_by, surcharge_applicable: self.surcharge_applicable, + request_incremental_authorization: self.request_incremental_authorization, + incremental_authorization_allowed: self.incremental_authorization_allowed, } } @@ -885,6 +893,8 @@ impl DataModelExt for PaymentIntent { payment_confirm_source: storage_model.payment_confirm_source, updated_by: storage_model.updated_by, surcharge_applicable: storage_model.surcharge_applicable, + request_incremental_authorization: storage_model.request_incremental_authorization, + incremental_authorization_allowed: storage_model.incremental_authorization_allowed, } } } @@ -899,11 +909,13 @@ impl DataModelExt for PaymentIntentUpdate { amount_captured, return_url, updated_by, + incremental_authorization_allowed, } => DieselPaymentIntentUpdate::ResponseUpdate { status, amount_captured, return_url, updated_by, + incremental_authorization_allowed, }, Self::MetadataUpdate { metadata, @@ -938,9 +950,15 @@ impl DataModelExt for PaymentIntentUpdate { billing_address_id, updated_by, }, - Self::PGStatusUpdate { status, updated_by } => { - DieselPaymentIntentUpdate::PGStatusUpdate { status, updated_by } - } + Self::PGStatusUpdate { + status, + 
updated_by, + incremental_authorization_allowed, + } => DieselPaymentIntentUpdate::PGStatusUpdate { + status, + updated_by, + incremental_authorization_allowed, + }, Self::Update { amount, currency, diff --git a/docker-compose.yml b/docker-compose.yml index fd18906143f5..f51a47aee940 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -273,3 +273,66 @@ services: - "8001:8001" volumes: - redisinsight_store:/db + + kafka0: + image: confluentinc/cp-kafka:7.0.5 + hostname: kafka0 + networks: + - router_net + ports: + - 9092:9092 + - 9093 + - 9997 + - 29092 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092 + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_PROCESS_ROLES: 'broker,controller' + KAFKA_NODE_ID: 1 + KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093' + KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092' + KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' + KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' + JMX_PORT: 9997 + KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997 + profiles: + - analytics + volumes: + - ./monitoring/kafka-script.sh:/tmp/update_run.sh + command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + + # Kafka UI for debugging kafka queues + kafka-ui: + image: provectuslabs/kafka-ui:latest + ports: + - 8090:8080 + networks: + - router_net + depends_on: + - kafka0 + profiles: + - analytics + environment: + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 + KAFKA_CLUSTERS_0_JMXPORT: 9997 + + clickhouse-server: + image: clickhouse/clickhouse-server:23.5 + networks: + - router_net + ports: + - "9000" + - "8123:8123" + profiles: + - analytics + ulimits: + nofile: + soft: 262144 + hard: 262144 \ No newline at end of file diff --git a/migrations/2023-11-23-100644_create_dashboard_metadata_table/down.sql b/migrations/2023-11-23-100644_create_dashboard_metadata_table/down.sql new file mode 100644 index 000000000000..746fb42109e9 --- /dev/null +++ b/migrations/2023-11-23-100644_create_dashboard_metadata_table/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +DROP INDEX IF EXISTS dashboard_metadata_index; +DROP TABLE IF EXISTS dashboard_metadata; \ No newline at end of file diff --git a/migrations/2023-11-23-100644_create_dashboard_metadata_table/up.sql b/migrations/2023-11-23-100644_create_dashboard_metadata_table/up.sql new file mode 100644 index 000000000000..4a74afb9ad0e --- /dev/null +++ b/migrations/2023-11-23-100644_create_dashboard_metadata_table/up.sql @@ -0,0 +1,21 @@ +-- Your SQL goes here + +CREATE TABLE IF NOT EXISTS dashboard_metadata ( + id SERIAL PRIMARY KEY, + user_id VARCHAR(64), + merchant_id VARCHAR(64) NOT NULL, + org_id VARCHAR(64) NOT NULL, + data_key VARCHAR(64) NOT NULL, + data_value JSON NOT NULL, + created_by VARCHAR(64) NOT NULL, + created_at TIMESTAMP NOT NULL 
DEFAULT now(), + last_modified_by VARCHAR(64) NOT NULL, + last_modified_at TIMESTAMP NOT NULL DEFAULT now() + ); + +CREATE UNIQUE INDEX IF NOT EXISTS dashboard_metadata_index ON dashboard_metadata ( + COALESCE(user_id, '0'), + merchant_id, + org_id, + data_key +); \ No newline at end of file diff --git a/migrations/2023-11-28-081058_add-request_incremental_authorization-in-payment-intent/down.sql b/migrations/2023-11-28-081058_add-request_incremental_authorization-in-payment-intent/down.sql new file mode 100644 index 000000000000..5ee12132dee6 --- /dev/null +++ b/migrations/2023-11-28-081058_add-request_incremental_authorization-in-payment-intent/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE payment_intent DROP COLUMN IF EXISTS request_incremental_authorization; +DROP TYPE "RequestIncrementalAuthorization"; diff --git a/migrations/2023-11-28-081058_add-request_incremental_authorization-in-payment-intent/up.sql b/migrations/2023-11-28-081058_add-request_incremental_authorization-in-payment-intent/up.sql new file mode 100644 index 000000000000..2c4d68593588 --- /dev/null +++ b/migrations/2023-11-28-081058_add-request_incremental_authorization-in-payment-intent/up.sql @@ -0,0 +1,3 @@ +-- Your SQL goes here +CREATE TYPE "RequestIncrementalAuthorization" AS ENUM ('true', 'false', 'default'); +ALTER TABLE payment_intent ADD COLUMN IF NOT EXISTS request_incremental_authorization "RequestIncrementalAuthorization" NOT NULL DEFAULT 'false'::"RequestIncrementalAuthorization"; diff --git a/migrations/2023-11-29-063030_add-incremental_authorization_allowed-in-payment-intent/down.sql b/migrations/2023-11-29-063030_add-incremental_authorization_allowed-in-payment-intent/down.sql new file mode 100644 index 000000000000..f08165481889 --- /dev/null +++ b/migrations/2023-11-29-063030_add-incremental_authorization_allowed-in-payment-intent/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE payment_intent DROP COLUMN IF EXISTS incremental_authorization_allowed; \ No newline at end of file diff --git a/migrations/2023-11-29-063030_add-incremental_authorization_allowed-in-payment-intent/up.sql b/migrations/2023-11-29-063030_add-incremental_authorization_allowed-in-payment-intent/up.sql new file mode 100644 index 000000000000..73fe22dd52df --- /dev/null +++ b/migrations/2023-11-29-063030_add-incremental_authorization_allowed-in-payment-intent/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +ALTER TABLE payment_intent ADD COLUMN IF NOT EXISTS incremental_authorization_allowed BOOLEAN; \ No newline at end of file diff --git a/openapi/openapi_spec.json b/openapi/openapi_spec.json index 86dc053d2d77..b1e313f15baa 100644 --- a/openapi/openapi_spec.json +++ b/openapi/openapi_spec.json @@ -4316,6 +4316,11 @@ "type": "string", "description": "The card holder's name", "example": "John Test" + }, + "card_cvc": { + "type": "string", + "description": "The CVC number for the card", + "nullable": true } } }, @@ -9545,7 +9550,8 @@ }, "card_cvc": { "type": "string", - "description": "This is used when payment is to be confirmed and the card is not saved", + "description": "This is used when payment is to be confirmed and the card is not saved.\nThis field will be deprecated soon, use the CardToken object instead", + "deprecated": true, "nullable": true }, "shipping": { @@ -9721,6 +9727,11 @@ } ], "nullable": true + }, + "request_incremental_authorization": { + "type": "boolean", + "description": "Request for an incremental authorization", + "nullable": true } 
} }, @@ -9909,7 +9920,8 @@ }, "card_cvc": { "type": "string", - "description": "This is used when payment is to be confirmed and the card is not saved", + "description": "This is used when payment is to be confirmed and the card is not saved.\nThis field will be deprecated soon, use the CardToken object instead", + "deprecated": true, "nullable": true }, "shipping": { @@ -10085,6 +10097,11 @@ } ], "nullable": true + }, + "request_incremental_authorization": { + "type": "boolean", + "description": "Request for an incremental authorization", + "nullable": true } } }, @@ -10518,6 +10535,11 @@ "type": "string", "description": "Identifier of the connector ( merchant connector account ) which was chosen to make the payment", "nullable": true + }, + "incremental_authorization_allowed": { + "type": "boolean", + "description": "If true incremental authorization can be performed on this payment", + "nullable": true } } }, diff --git a/postman/collection-dir/adyen_uk/Flow Testcases/Variation Cases/Scenario8-Refund for unsuccessful payment/Refunds - Create/event.test.js b/postman/collection-dir/adyen_uk/Flow Testcases/Variation Cases/Scenario8-Refund for unsuccessful payment/Refunds - Create/event.test.js index 6731d57fb694..b88beefec22e 100644 --- a/postman/collection-dir/adyen_uk/Flow Testcases/Variation Cases/Scenario8-Refund for unsuccessful payment/Refunds - Create/event.test.js +++ b/postman/collection-dir/adyen_uk/Flow Testcases/Variation Cases/Scenario8-Refund for unsuccessful payment/Refunds - Create/event.test.js @@ -50,10 +50,10 @@ if (jsonData?.error?.type) { // Response body should have value "invalid_request" for "error type" if (jsonData?.error?.message) { pm.test( - "[POST]::/payments - Content check if value for 'error.message' matches 'The payment has not succeeded yet. Please pass a successful payment to initiate refund'", + "[POST]::/payments - Content check if value for 'error.message' matches 'This Payment could not be refund because it has a status of requires_confirmation. The expected state is succeeded, partially_captured'", function () { pm.expect(jsonData.error.message).to.eql( - "The payment has not succeeded yet. Please pass a successful payment to initiate refund", + "This Payment could not be refund because it has a status of requires_confirmation. 
The expected state is succeeded, partially_captured", ); }, ); diff --git a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario1-Create payment with confirm true/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario1-Create payment with confirm true/Payments - Create/request.json index a63210df7f42..03aea095ff35 100644 --- a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario1-Create payment with confirm true/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario1-Create payment with confirm true/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario2-Create payment with confirm false/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario2-Create payment with confirm false/Payments - Create/request.json index 99392fc0f916..5a651cc0f119 100644 --- a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario2-Create payment with confirm false/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario2-Create payment with confirm false/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario3-Create payment without PMD/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario3-Create payment without PMD/Payments - Create/request.json index 90982e5acd38..54cf1b15e3db 100644 --- a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario3-Create payment without PMD/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario3-Create payment without PMD/Payments - Create/request.json @@ -69,7 +69,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario4-Create payment with Manual capture/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario4-Create payment with Manual capture/Payments - Create/request.json index 0fc567f8bea0..f0915480e13e 100644 --- a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario4-Create payment with Manual capture/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario4-Create payment with Manual capture/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario5-Refund full payment/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario5-Refund full payment/Payments - Create/request.json index 625ae3a9d286..00b12f40997f 100644 --- a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario5-Refund full payment/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario5-Refund full payment/Payments - Create/request.json @@ -78,7 +78,7 
@@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario7-Void the payment/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario7-Void the payment/Payments - Create/request.json index 99392fc0f916..5a651cc0f119 100644 --- a/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario7-Void the payment/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Happy Cases/Scenario7-Void the payment/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/QuickStart/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/QuickStart/Payments - Create/request.json index a99d3db4fa53..72c62f360b8d 100644 --- a/postman/collection-dir/payme/Flow Testcases/QuickStart/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/QuickStart/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario3-Capture greater amount/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario3-Capture greater amount/Payments - Create/request.json index 0fc567f8bea0..f0915480e13e 100644 --- a/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario3-Capture greater amount/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario3-Capture greater amount/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario4-Capture the succeeded payment/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario4-Capture the succeeded payment/Payments - Create/request.json index a63210df7f42..03aea095ff35 100644 --- a/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario4-Capture the succeeded payment/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario4-Capture the succeeded payment/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario5-Void the success_slash_failure payment/Payments - Create/request.json b/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario5-Void the success_slash_failure payment/Payments - Create/request.json index a63210df7f42..03aea095ff35 100644 --- a/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario5-Void the success_slash_failure payment/Payments - Create/request.json +++ b/postman/collection-dir/payme/Flow Testcases/Variation Cases/Scenario5-Void the success_slash_failure payment/Payments - Create/request.json @@ -79,7 +79,7 @@ { "product_name": "Apple iphone 15", "quantity": 1, - "amount": 5500, + "amount": 
6540, "account_name": "transaction_processing" } ] diff --git a/postman/collection-json/payme.postman_collection.json b/postman/collection-json/payme.postman_collection.json index 4bca668a6af6..280a131386e5 100644 --- a/postman/collection-json/payme.postman_collection.json +++ b/postman/collection-json/payme.postman_collection.json @@ -532,7 +532,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+1\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+1\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -761,7 +761,7 @@ "language": "json" } }, - "raw": 
"{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -1003,7 +1003,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":false,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment 
request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":false,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -1395,7 +1395,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":false,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San 
Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":false,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -1787,7 +1787,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"manual\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"manual\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment 
request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -2189,7 +2189,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San 
Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -3364,7 +3364,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":false,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":false,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple 
iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -4506,7 +4506,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"manual\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"manual\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -4886,7 +4886,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment 
request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments", @@ -5147,7 +5147,7 @@ "language": "json" } }, - "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San 
Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":5500,\"account_name\":\"transaction_processing\"}]}" + "raw": "{\"amount\":6540,\"currency\":\"USD\",\"confirm\":true,\"capture_method\":\"automatic\",\"capture_on\":\"2022-09-10T10:11:12Z\",\"amount_to_capture\":6540,\"customer_id\":\"StripeCustomer\",\"email\":\"guest@example.com\",\"name\":\"John Doe\",\"phone\":\"999999999\",\"phone_country_code\":\"+65\",\"description\":\"Its my first payment request\",\"authentication_type\":\"no_three_ds\",\"return_url\":\"https://duck.com\",\"payment_method\":\"card\",\"payment_method_data\":{\"card\":{\"card_number\":\"4242424242424242\",\"card_exp_month\":\"10\",\"card_exp_year\":\"25\",\"card_holder_name\":\"joseph Doe\",\"card_cvc\":\"123\"}},\"billing\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"shipping\":{\"address\":{\"line1\":\"1467\",\"line2\":\"Harrison Street\",\"line3\":\"Harrison Street\",\"city\":\"San Fransico\",\"state\":\"California\",\"zip\":\"94122\",\"country\":\"US\",\"first_name\":\"PiX\",\"last_name\":\"gnana\"}},\"statement_descriptor_name\":\"joseph\",\"statement_descriptor_suffix\":\"JS\",\"metadata\":{\"udf1\":\"value1\",\"new_customer\":\"true\",\"login_date\":\"2019-09-10T10:11:12Z\"},\"order_details\":[{\"product_name\":\"Apple iphone 15\",\"quantity\":1,\"amount\":6540,\"account_name\":\"transaction_processing\"}]}" }, "url": { "raw": "{{baseUrl}}/payments",