diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 638d5540d3d6..3024477bac20 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -33,6 +33,17 @@ crates/router/src/compatibility/ @juspay/hyperswitch-compatibility
crates/router/src/core/ @juspay/hyperswitch-core
+crates/api_models/src/routing.rs @juspay/hyperswitch-routing
+crates/euclid @juspay/hyperswitch-routing
+crates/euclid_macros @juspay/hyperswitch-routing
+crates/euclid_wasm @juspay/hyperswitch-routing
+crates/kgraph_utils @juspay/hyperswitch-routing
+crates/router/src/routes/routing.rs @juspay/hyperswitch-routing
+crates/router/src/core/routing @juspay/hyperswitch-routing
+crates/router/src/core/routing.rs @juspay/hyperswitch-routing
+crates/router/src/core/payments/routing @juspay/hyperswitch-routing
+crates/router/src/core/payments/routing.rs @juspay/hyperswitch-routing
+
crates/router/src/scheduler/ @juspay/hyperswitch-process-tracker
Dockerfile @juspay/hyperswitch-infra
diff --git a/.github/git-cliff-release.toml b/.github/git-cliff-release.toml
deleted file mode 100644
index 1b82c812b5d8..000000000000
--- a/.github/git-cliff-release.toml
+++ /dev/null
@@ -1,89 +0,0 @@
-# configuration file for git-cliff
-# see https://github.com/orhun/git-cliff#configuration-file
-
-[changelog]
-# changelog header
-header = ""
-# template for the changelog body
-# https://tera.netlify.app/docs/#introduction
-body = """
-{% set newline = "\n" -%}
-{% set commit_base_url = "https://github.com/juspay/hyperswitch/commit/" -%}
-{% set compare_base_url = "https://github.com/juspay/hyperswitch/compare/" -%}
-{% if version -%}
- ## {{ version | trim_start_matches(pat="v") }} ({{ timestamp | date(format="%Y-%m-%d") }})
-{% else -%}
- ## [unreleased]
-{% endif -%}
-{% for group, commits in commits | group_by(attribute="group") %}
- {# The `striptags` removes the HTML comments added while grouping -#}
- ### {{ group | striptags | trim | upper_first }}
- {% for scope, commits in commits | group_by(attribute="scope") %}
- - {{ "**" ~ scope ~ ":" ~ "**" -}}
- {% for commit in commits -%}
- {% if commits | length != 1 %}{{ newline ~ " - " }}{% else %}{{ " " }}{% endif -%}
- {{ commit.message | upper_first | trim }} ([`{{ commit.id | truncate(length=7, end="") }}`]({{ commit_base_url ~ commit.id }})) by {{ commit.author.email -}}
- {%- endfor -%}
- {%- endfor -%}
- {%- for commit in commits -%}
- {% if commit.scope %}{% else %}
- - {{ commit.message | upper_first | trim }} ([`{{ commit.id | truncate(length=7, end="") }}`]({{ commit_base_url ~ commit.id }})) by {{ commit.author.email -}}
- {%- endif %}
- {%- endfor %}
-{% endfor %}
-{% if previous and previous.commit_id and commit_id -%}
- **Full Changelog:** [`{{ previous.version }}...{{ version }}`]({{ compare_base_url }}{{ previous.version }}...{{ version }})\n
-{% endif %}
-"""
-# remove the leading and trailing whitespace from the template
-trim = true
-# changelog footer
-footer = ""
-
-[git]
-# parse the commits based on https://www.conventionalcommits.org
-conventional_commits = true
-# filter out the commits that are not conventional
-filter_unconventional = false
-# process each line of a commit as an individual commit
-split_commits = false
-# regex for preprocessing the commit messages
-commit_preprocessors = [
- { pattern = "^ +", replace = "" }, # remove spaces at the beginning of the message
- { pattern = " +", replace = " " }, # replace multiple spaces with a single space
- { pattern = "\\(#([0-9]+)\\)", replace = "([#${1}](https://github.com/juspay/hyperswitch/pull/${1}))" }, # replace PR numbers with links
- { pattern = "(\\n?Co-authored-by: .+ <.+@.+>\\n?)+", replace = "" }, # remove co-author information
- { pattern = "(\\n?Signed-off-by: .+ <.+@.+>\\n?)+", replace = "" }, # remove sign-off information
-]
-# regex for parsing and grouping commits
-# the HTML comments (``) are a workaround to get sections in custom order, since `git-cliff` sorts sections in alphabetical order
-# reference: https://github.com/orhun/git-cliff/issues/9
-commit_parsers = [
- { message = "^(?i)(feat)", group = "Features" },
- { message = "^(?i)(fix)", group = "Bug Fixes" },
- { message = "^(?i)(perf)", group = "Performance" },
- { body = ".*security", group = "Security" },
- { message = "^(?i)(refactor)", group = "Refactors" },
- { message = "^(?i)(test)", group = "Testing" },
- { message = "^(?i)(docs)", group = "Documentation" },
- { message = "^(?i)(chore\\(version\\)): V[\\d]+\\.[\\d]+\\.[\\d]+", skip = true },
- { message = "^(?i)(chore)", group = "Miscellaneous Tasks" },
- { message = "^(?i)(build)", group = "Build System / Dependencies" },
- { message = "^(?i)(ci)", skip = true },
-]
-# protect breaking changes from being skipped due to matching a skipping commit_parser
-protect_breaking_commits = false
-# filter out the commits that are not matched by commit parsers
-filter_commits = false
-# glob pattern for matching git tags
-tag_pattern = "v[0-9]*"
-# regex for skipping tags
-# skip_tags = "v0.1.0-beta.1"
-# regex for ignoring tags
-# ignore_tags = ""
-# sort the tags topologically
-topo_order = true
-# sort the commits inside sections by oldest/newest order
-sort_commits = "oldest"
-# limit the number of commits included in the changelog.
-# limit_commits = 42
diff --git a/.github/secrets/connector_auth.toml.gpg b/.github/secrets/connector_auth.toml.gpg
index 487e436df463..7da9189ade58 100644
Binary files a/.github/secrets/connector_auth.toml.gpg and b/.github/secrets/connector_auth.toml.gpg differ
diff --git a/.github/workflows/CI-pr.yml b/.github/workflows/CI-pr.yml
index c79ffa63709a..ecb13f3c1a85 100644
--- a/.github/workflows/CI-pr.yml
+++ b/.github/workflows/CI-pr.yml
@@ -41,17 +41,25 @@ jobs:
name: Check formatting
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository with token
if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.ref }}
- token: ${{ secrets.AUTO_FILE_UPDATE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Checkout repository for fork
if: ${{ github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -71,8 +79,8 @@ jobs:
cargo +nightly fmt --all
if ! git diff --exit-code --quiet -- crates; then
echo "::notice::Formatting check failed"
- git config --local user.name 'github-actions[bot]'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
git add crates
git commit --message 'chore: run formatter'
git push
@@ -91,7 +99,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: "Fetch base branch"
shell: bash
@@ -108,12 +116,12 @@ jobs:
with:
toolchain: 1.65
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
version: 0.6.5
@@ -280,7 +288,7 @@ jobs:
# steps:
# - name: Checkout repository
- # uses: actions/checkout@v3
+ # uses: actions/checkout@v4
# - name: Run cargo-deny
# uses: EmbarkStudios/cargo-deny-action@v1.3.2
@@ -299,17 +307,25 @@ jobs:
# - windows-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository for fork
if: ${{ (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Checkout repository with token
if: ${{ (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.ref }}
- token: ${{ secrets.AUTO_FILE_UPDATE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: "Fetch base branch"
shell: bash
@@ -328,16 +344,16 @@ jobs:
components: clippy
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
# - name: Install cargo-nextest
- # uses: baptiste0928/cargo-install@v2.1.0
+ # uses: baptiste0928/cargo-install@v2.2.0
# with:
# crate: cargo-nextest
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
@@ -360,8 +376,8 @@ jobs:
shell: bash
run: |
if ! git diff --quiet --exit-code -- Cargo.lock ; then
- git config --local user.name 'github-actions[bot]'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
git add Cargo.lock
git commit --message 'chore: update Cargo.lock'
git push
@@ -516,7 +532,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Spell check
uses: crate-ci/typos@master
diff --git a/.github/workflows/CI-push.yml b/.github/workflows/CI-push.yml
index edc9317e526d..a6a4bde5a5d4 100644
--- a/.github/workflows/CI-push.yml
+++ b/.github/workflows/CI-push.yml
@@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -50,7 +50,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install mold linker
uses: rui314/setup-mold@v1
@@ -63,12 +63,12 @@ jobs:
with:
toolchain: 1.65
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
version: 0.6.5
@@ -101,7 +101,7 @@ jobs:
# steps:
# - name: Checkout repository
- # uses: actions/checkout@v3
+ # uses: actions/checkout@v4
# - name: Run cargo-deny
# uses: EmbarkStudios/cargo-deny-action@v1.3.2
@@ -121,7 +121,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install mold linker
uses: rui314/setup-mold@v1
@@ -136,16 +136,16 @@ jobs:
components: clippy
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
# - name: Install cargo-nextest
- # uses: baptiste0928/cargo-install@v2.1.0
+ # uses: baptiste0928/cargo-install@v2.2.0
# with:
# crate: cargo-nextest
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
@@ -178,7 +178,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Spell check
uses: crate-ci/typos@master
diff --git a/.github/workflows/auto-release-tag.yml b/.github/workflows/auto-release-tag.yml
index 5334c914cda5..4555b68764c1 100644
--- a/.github/workflows/auto-release-tag.yml
+++ b/.github/workflows/auto-release-tag.yml
@@ -10,18 +10,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PASSWD }}
- name: Build and push router Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=router
@@ -30,7 +30,7 @@ jobs:
tags: juspaydotin/orca:${{ github.ref_name }}, juspaydotin/hyperswitch-router:${{ github.ref_name }}
- name: Build and push consumer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=scheduler
@@ -40,7 +40,7 @@ jobs:
tags: juspaydotin/orca-consumer:${{ github.ref_name }}, juspaydotin/hyperswitch-consumer:${{ github.ref_name }}
- name: Build and push producer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=scheduler
@@ -50,7 +50,7 @@ jobs:
tags: juspaydotin/orca-producer:${{ github.ref_name }}, juspaydotin/hyperswitch-producer:${{ github.ref_name }}
- name: Build and push drainer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=drainer
diff --git a/.github/workflows/connector-sanity-tests.yml b/.github/workflows/connector-sanity-tests.yml
index 40a3c3612503..48e6a946a450 100644
--- a/.github/workflows/connector-sanity-tests.yml
+++ b/.github/workflows/connector-sanity-tests.yml
@@ -79,14 +79,14 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: stable 2 weeks ago
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
- name: Decrypt connector auth file
env:
diff --git a/.github/workflows/connector-ui-sanity-tests.yml b/.github/workflows/connector-ui-sanity-tests.yml
index 5db45f2962a5..d4317681a113 100644
--- a/.github/workflows/connector-ui-sanity-tests.yml
+++ b/.github/workflows/connector-ui-sanity-tests.yml
@@ -82,7 +82,7 @@ jobs:
- name: Checkout repository
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Decrypt connector auth file
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
@@ -113,10 +113,10 @@ jobs:
toolchain: stable
- name: Build and Cache Rust Dependencies
- uses: Swatinem/rust-cache@v2.4.0
+ uses: Swatinem/rust-cache@v2.7.0
- name: Install Diesel CLI with Postgres Support
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
with:
crate: diesel_cli
diff --git a/.github/workflows/conventional-commit-check.yml b/.github/workflows/conventional-commit-check.yml
index 5fd25e9332d1..ad01642068b5 100644
--- a/.github/workflows/conventional-commit-check.yml
+++ b/.github/workflows/conventional-commit-check.yml
@@ -45,7 +45,7 @@ jobs:
with:
toolchain: stable 2 weeks ago
- - uses: baptiste0928/cargo-install@v2.1.0
+ - uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cocogitto
diff --git a/.github/workflows/create-hotfix-branch.yml b/.github/workflows/create-hotfix-branch.yml
index 77a8bad6bc66..6fd2d4947719 100644
--- a/.github/workflows/create-hotfix-branch.yml
+++ b/.github/workflows/create-hotfix-branch.yml
@@ -8,11 +8,19 @@ jobs:
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- token: ${{ secrets.AUTO_RELEASE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Check if the input is valid tag
shell: bash
diff --git a/.github/workflows/create-hotfix-tag.yml b/.github/workflows/create-hotfix-tag.yml
index 45699bda24dc..e9df004139e0 100644
--- a/.github/workflows/create-hotfix-tag.yml
+++ b/.github/workflows/create-hotfix-tag.yml
@@ -8,14 +8,22 @@ jobs:
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- token: ${{ secrets.AUTO_RELEASE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Install git-cliff
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: git-cliff
version: 1.2.0
@@ -86,8 +94,8 @@ jobs:
- name: Set Git Configuration
shell: bash
run: |
- git config --local user.name 'github-actions'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
- name: Push created commit and tag
shell: bash
diff --git a/.github/workflows/hotfix-pr-check.yml b/.github/workflows/hotfix-pr-check.yml
index 59e0bbee3cb4..e178ba31c1e8 100644
--- a/.github/workflows/hotfix-pr-check.yml
+++ b/.github/workflows/hotfix-pr-check.yml
@@ -15,12 +15,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Get hotfix pull request body
shell: bash
- run: |
- echo '${{ github.event.pull_request.body }}' > hotfix_pr_body.txt
+ env:
+ PR_BODY: ${{ github.event.pull_request.body }}
+ run: echo $PR_BODY > hotfix_pr_body.txt
- name: Get a list of all original PR numbers
shell: bash
diff --git a/.github/workflows/manual-release.yml b/.github/workflows/manual-release.yml
index 0b70631e113d..9ae80047a669 100644
--- a/.github/workflows/manual-release.yml
+++ b/.github/workflows/manual-release.yml
@@ -17,18 +17,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PASSWD }}
- name: Build and push router Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
@@ -39,7 +39,7 @@ jobs:
tags: juspaydotin/orca:${{ github.sha }}
- name: Build and push consumer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
@@ -50,7 +50,7 @@ jobs:
tags: juspaydotin/orca-consumer:${{ github.sha }}
- name: Build and push producer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
@@ -61,7 +61,7 @@ jobs:
tags: juspaydotin/orca-producer:${{ github.sha }}
- name: Build and push drainer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
diff --git a/.github/workflows/migration-check.yaml b/.github/workflows/migration-check.yaml
index 0c4baaa96193..b740bd3a5b77 100644
--- a/.github/workflows/migration-check.yaml
+++ b/.github/workflows/migration-check.yaml
@@ -40,14 +40,14 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: stable 2 weeks ago
- - uses: baptiste0928/cargo-install@v2.1.0
+ - uses: baptiste0928/cargo-install@v2.2.0
with:
crate: diesel_cli
features: postgres
diff --git a/.github/workflows/postman-collection-runner.yml b/.github/workflows/postman-collection-runner.yml
index 3291755b56cf..d5434520715f 100644
--- a/.github/workflows/postman-collection-runner.yml
+++ b/.github/workflows/postman-collection-runner.yml
@@ -50,7 +50,7 @@ jobs:
steps:
- name: Repository checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Decrypt connector auth file
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
@@ -82,11 +82,11 @@ jobs:
- name: Build and Cache Rust Dependencies
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
- uses: Swatinem/rust-cache@v2.4.0
+ uses: Swatinem/rust-cache@v2.7.0
- name: Install Diesel CLI with Postgres Support
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: diesel_cli
features: postgres
diff --git a/.github/workflows/pr-title-spell-check.yml b/.github/workflows/pr-title-spell-check.yml
index 6ab6f184739d..03b5a8758870 100644
--- a/.github/workflows/pr-title-spell-check.yml
+++ b/.github/workflows/pr-title-spell-check.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Store PR title in a file
shell: bash
diff --git a/.github/workflows/release-new-version.yml b/.github/workflows/release-new-version.yml
index 872c207e8aa3..2f8ae7e4819f 100644
--- a/.github/workflows/release-new-version.yml
+++ b/.github/workflows/release-new-version.yml
@@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ secrets.AUTO_RELEASE_PAT }}
@@ -35,24 +35,11 @@ jobs:
toolchain: stable 2 weeks ago
- name: Install cocogitto
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cocogitto
version: 5.4.0
- - name: Install git-cliff
- uses: baptiste0928/cargo-install@v2.1.0
- with:
- crate: git-cliff
- version: 1.2.0
-
- - name: Install changelog-gh-usernames
- uses: baptiste0928/cargo-install@v2.1.0
- with:
- crate: changelog-gh-usernames
- git: https://github.com/SanchithHegde/changelog-gh-usernames
- rev: dab6da3ff99dbbff8650c114984c4d8be5161ac8
-
- name: Set Git Configuration
shell: bash
run: |
@@ -87,7 +74,7 @@ jobs:
PREVIOUS_TAG="$(git tag --sort='version:refname' --merged | tail --lines 1)"
if [[ "$(cog bump --auto --dry-run)" == *"No conventional commits for your repository that required a bump"* ]]; then
NEW_TAG="$(cog bump --patch --dry-run)"
- elif [[ "${PREVIOUS_TAG}" != "${NEW_TAG}" ]]; then
+ else
NEW_TAG="$(cog bump --auto --dry-run)"
fi
echo "NEW_TAG=${NEW_TAG}" >> $GITHUB_ENV
@@ -106,15 +93,3 @@ jobs:
run: |
git push
git push --tags
-
- - name: Generate release notes and create GitHub release
- shell: bash
- if: ${{ env.NEW_TAG != env.PREVIOUS_TAG }}
- env:
- GITHUB_TOKEN: ${{ github.token }}
- GH_TOKEN: ${{ secrets.AUTO_RELEASE_PAT }}
- # Need to consider commits inclusive of previous tag to generate diff link between versions.
- # This would also then require us to remove the last few lines from the changelog.
- run: |
- git-cliff --config .github/git-cliff-release.toml "${PREVIOUS_TAG}^..${NEW_TAG}" | changelog-gh-usernames | sed "/## ${PREVIOUS_TAG#v}/,\$d" > release-notes.md
- gh release create "${NEW_TAG}" --notes-file release-notes.md --verify-tag --title "Hyperswitch ${NEW_TAG}"
diff --git a/.github/workflows/validate-openapi-spec.yml b/.github/workflows/validate-openapi-spec.yml
index 530c59c9236d..bdb987d625ac 100644
--- a/.github/workflows/validate-openapi-spec.yml
+++ b/.github/workflows/validate-openapi-spec.yml
@@ -16,24 +16,32 @@ jobs:
name: Validate generated OpenAPI spec file
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout PR from fork
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- name: Checkout PR with token
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- token: ${{ secrets.AUTO_FILE_UPDATE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Checkout merge group HEAD commit
if: ${{ github.event_name == 'merge_group' }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.merge_group.head_sha }}
@@ -60,8 +68,8 @@ jobs:
shell: bash
run: |
if ! git diff --quiet --exit-code -- openapi/openapi_spec.json ; then
- git config --local user.name 'github-actions[bot]'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
git add openapi/openapi_spec.json
git commit --message 'docs(openapi): re-generate OpenAPI specification'
git push
diff --git a/.typos.toml b/.typos.toml
index 1ac38a005c9e..4ce21526604b 100644
--- a/.typos.toml
+++ b/.typos.toml
@@ -24,6 +24,7 @@ optin = "optin" # Boku preflow name
optin_id = "optin_id" # Boku's id for optin flow
deriver = "deriver"
Deriver = "Deriver"
+requestor_card_reference = "requestor_card_reference"
[default.extend-words]
aci = "aci" # Name of a connector
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e5da650def02..3831e3d1caf3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,398 @@ All notable changes to HyperSwitch will be documented here.
- - -
+## 1.93.0 (2023-11-30)
+
+### Features
+
+- **connector:** [BANKOFAMERICA] Add Required Fields for GPAY ([#3014](https://github.com/juspay/hyperswitch/pull/3014)) ([`d30b58a`](https://github.com/juspay/hyperswitch/commit/d30b58abb5e716b70c2dadec9e6f13c9e3403b6f))
+- **core:** Add ability to verify connector credentials before integrating the connector ([#2986](https://github.com/juspay/hyperswitch/pull/2986)) ([`39f255b`](https://github.com/juspay/hyperswitch/commit/39f255b4b209588dec35d780078c2ab7ceb37b10))
+- **router:** Make core changes in payments flow to support incremental authorization ([#3009](https://github.com/juspay/hyperswitch/pull/3009)) ([`1ca2ba4`](https://github.com/juspay/hyperswitch/commit/1ca2ba459495ff9340954c87a6ae3e4dce0e7b71))
+- **user:** Add support for dashboard metadata ([#3000](https://github.com/juspay/hyperswitch/pull/3000)) ([`6a2e4ab`](https://github.com/juspay/hyperswitch/commit/6a2e4ab4169820f35e953a949bd2e82e7f098ed2))
+
+### Bug Fixes
+
+- **connector:**
+ - Move authorised status to charged in setup mandate ([#3017](https://github.com/juspay/hyperswitch/pull/3017)) ([`663754d`](https://github.com/juspay/hyperswitch/commit/663754d629d59a17ba9d4985fe04f9404ceb16b7))
+ - [Trustpay] Add mapping to error code `800.100.165` and `900.100.100` ([#2925](https://github.com/juspay/hyperswitch/pull/2925)) ([`8c37a8d`](https://github.com/juspay/hyperswitch/commit/8c37a8d857c5a58872fa2b2e194b85e755129677))
+- **core:** Error message on Refund update for `Not Implemented` Case ([#3011](https://github.com/juspay/hyperswitch/pull/3011)) ([`6b7ada1`](https://github.com/juspay/hyperswitch/commit/6b7ada1a34450ea3a7fc019375ba462a14ddd6ab))
+- **pm_list:** [Trustpay] Update Cards, Bank_redirect - blik pm type required field info for Trustpay ([#2999](https://github.com/juspay/hyperswitch/pull/2999)) ([`c05432c`](https://github.com/juspay/hyperswitch/commit/c05432c0bd70f222c2f898ce2cbb47a46364a490))
+- **router:**
+ - [Dlocal] connector transaction id fix ([#2872](https://github.com/juspay/hyperswitch/pull/2872)) ([`44b1f49`](https://github.com/juspay/hyperswitch/commit/44b1f4949ea06d59480670ccfa02446fa7713d13))
+ - Use default value for the routing algorithm column during business profile creation ([#2791](https://github.com/juspay/hyperswitch/pull/2791)) ([`b1fe76a`](https://github.com/juspay/hyperswitch/commit/b1fe76a82b4026d6eaa3baf4356378040880a458))
+- **routing:** Fix kgraph to exclude PM auth during construction ([#3019](https://github.com/juspay/hyperswitch/pull/3019)) ([`c6cb527`](https://github.com/juspay/hyperswitch/commit/c6cb527f07e23796c342f3562fbf3b61f1ef6801))
+
+### Refactors
+
+- **connector:**
+ - [Stax] change error message from NotSupported to NotImplemented ([#2879](https://github.com/juspay/hyperswitch/pull/2879)) ([`8a4dabc`](https://github.com/juspay/hyperswitch/commit/8a4dabc61df3e6012e50f785d93808ca3349be65))
+ - [Volt] change error message from NotSupported to NotImplemented ([#2878](https://github.com/juspay/hyperswitch/pull/2878)) ([`de8e31b`](https://github.com/juspay/hyperswitch/commit/de8e31b70d9b3c11e268cd1deffa71918dc4270d))
+ - [Adyen] Change country and issuer type to Optional for OpenBankingUk ([#2993](https://github.com/juspay/hyperswitch/pull/2993)) ([`ab3dac7`](https://github.com/juspay/hyperswitch/commit/ab3dac79b4f138cd1f60a9afc0635dcc137a4a05))
+- **postman:** Fix payme postman collection for handling `order_details` ([#2996](https://github.com/juspay/hyperswitch/pull/2996)) ([`1e60c71`](https://github.com/juspay/hyperswitch/commit/1e60c710985b341a118bb32962bd74b406d78f69))
+
+**Full Changelog:** [`v1.92.0...v1.93.0`](https://github.com/juspay/hyperswitch/compare/v1.92.0...v1.93.0)
+
+- - -
+
+
+## 1.92.0 (2023-11-29)
+
+### Features
+
+- **analytics:** Add Clickhouse based analytics ([#2988](https://github.com/juspay/hyperswitch/pull/2988)) ([`9df4e01`](https://github.com/juspay/hyperswitch/commit/9df4e0193ffeb6d1cc323bdebb7e2bdfb2a375e2))
+- **ses_email:** Add email services to hyperswitch ([#2977](https://github.com/juspay/hyperswitch/pull/2977)) ([`5f5e895`](https://github.com/juspay/hyperswitch/commit/5f5e895f638701a0e6ab3deea9101ef39033dd16))
+
+### Bug Fixes
+
+- **router:** Make use of warning to log errors when apple pay metadata parsing fails ([#3010](https://github.com/juspay/hyperswitch/pull/3010)) ([`2e57745`](https://github.com/juspay/hyperswitch/commit/2e57745352c547323ac2df2554f6bc2dbd6da37f))
+
+**Full Changelog:** [`v1.91.1...v1.92.0`](https://github.com/juspay/hyperswitch/compare/v1.91.1...v1.92.0)
+
+- - -
+
+
+## 1.91.1 (2023-11-29)
+
+### Bug Fixes
+
+- Remove `dummy_connector` from `default` features in `common_enums` ([#3005](https://github.com/juspay/hyperswitch/pull/3005)) ([`bb593ab`](https://github.com/juspay/hyperswitch/commit/bb593ab0cd1a30190b6c305f2432de83ac7fde93))
+- Remove error propagation if card name not found in locker in case of temporary token ([#3006](https://github.com/juspay/hyperswitch/pull/3006)) ([`5c32b37`](https://github.com/juspay/hyperswitch/commit/5c32b3739e2c5895fe7f5cf8cc92f917c2639eac))
+- Few fields were not getting updated in apply_changeset function ([#3002](https://github.com/juspay/hyperswitch/pull/3002)) ([`d289524`](https://github.com/juspay/hyperswitch/commit/d289524869f0c3835db9cf90d57ebedf560e0291))
+
+### Miscellaneous Tasks
+
+- **deps:** Bump openssl from 0.10.57 to 0.10.60 ([#3004](https://github.com/juspay/hyperswitch/pull/3004)) ([`1c2f35a`](https://github.com/juspay/hyperswitch/commit/1c2f35af92608fca5836448710eca9f9c23a776a))
+
+**Full Changelog:** [`v1.91.0...v1.91.1`](https://github.com/juspay/hyperswitch/compare/v1.91.0...v1.91.1)
+
+- - -
+
+
+## 1.91.0 (2023-11-28)
+
+### Features
+
+- **core:**
+ - [Paypal] Add Preprocessing flow to CompleteAuthorize for Card 3DS Auth Verification ([#2757](https://github.com/juspay/hyperswitch/pull/2757)) ([`77fc92c`](https://github.com/juspay/hyperswitch/commit/77fc92c99a99aaf76d270ba5b981928183a05768))
+ - Enable payment refund when payment is partially captured ([#2991](https://github.com/juspay/hyperswitch/pull/2991)) ([`837480d`](https://github.com/juspay/hyperswitch/commit/837480d935cce8cc35f07c5ccb3560285909bc52))
+- **currency_conversion:** Add currency conversion feature ([#2948](https://github.com/juspay/hyperswitch/pull/2948)) ([`c0116db`](https://github.com/juspay/hyperswitch/commit/c0116db271f6afc1b93c04705209bfc346228c68))
+- **payment_methods:** Receive `card_holder_name` in confirm flow when using token for payment ([#2982](https://github.com/juspay/hyperswitch/pull/2982)) ([`e7ad3a4`](https://github.com/juspay/hyperswitch/commit/e7ad3a4db8823f3ae8d381771739670d8350e6da))
+
+### Bug Fixes
+
+- **connector:** [Adyen] `ErrorHandling` in case of Balance Check for Gift Cards ([#1976](https://github.com/juspay/hyperswitch/pull/1976)) ([`bd889c8`](https://github.com/juspay/hyperswitch/commit/bd889c834dd5e201b055233016f7226fa2187aea))
+- **core:** Replace euclid enum with RoutableConnectors enum ([#2994](https://github.com/juspay/hyperswitch/pull/2994)) ([`ff6a0dd`](https://github.com/juspay/hyperswitch/commit/ff6a0dd0b515778b64a3e28ef905154eee85ec78))
+- Remove error propagation if card name not found in locker ([#2998](https://github.com/juspay/hyperswitch/pull/2998)) ([`1c5a9b5`](https://github.com/juspay/hyperswitch/commit/1c5a9b5452afc33b18f45389bf3bdfd80820f476))
+
+### Refactors
+
+- **events:** Adding changes to type of API events to Kafka ([#2992](https://github.com/juspay/hyperswitch/pull/2992)) ([`d63f6f7`](https://github.com/juspay/hyperswitch/commit/d63f6f7224f35018e7c707353508bbacc2baed5c))
+- **masking:** Use empty enums as masking:Strategy types ([#2874](https://github.com/juspay/hyperswitch/pull/2874)) ([`0e66b1b`](https://github.com/juspay/hyperswitch/commit/0e66b1b5dcce6dd87c9d743c9eb73d0cd8e330b2))
+- **router:** Add openapi spec support for merchant_connector apis ([#2997](https://github.com/juspay/hyperswitch/pull/2997)) ([`cdbb385`](https://github.com/juspay/hyperswitch/commit/cdbb3853cd44443f8487abc16a9ba5d99f22e475))
+- Added min idle and max lifetime for database config ([#2900](https://github.com/juspay/hyperswitch/pull/2900)) ([`b3c51e6`](https://github.com/juspay/hyperswitch/commit/b3c51e6eb55c58adc024ee32b59c3910b2b72131))
+
+### Testing
+
+- **postman:** Update postman collection files ([`af6b05c`](https://github.com/juspay/hyperswitch/commit/af6b05c504b6fdbec7db77fa7f71535d7fea3e7a))
+
+**Full Changelog:** [`v1.90.0...v1.91.0`](https://github.com/juspay/hyperswitch/compare/v1.90.0...v1.91.0)
+
+- - -
+
+
+## 1.90.0 (2023-11-27)
+
+### Features
+
+- **auth:** Add Authorization for JWT Authentication types ([#2973](https://github.com/juspay/hyperswitch/pull/2973)) ([`03c0a77`](https://github.com/juspay/hyperswitch/commit/03c0a772a99000acf4676db8ca2ce916036281d1))
+- **user:** Implement change password for user ([#2959](https://github.com/juspay/hyperswitch/pull/2959)) ([`bfa1645`](https://github.com/juspay/hyperswitch/commit/bfa1645b847fb881eb2370d5dbfef6fd0b53725d))
+
+### Bug Fixes
+
+- **router:** Added validation to check total orderDetails amount equal to amount in request ([#2965](https://github.com/juspay/hyperswitch/pull/2965)) ([`37532d4`](https://github.com/juspay/hyperswitch/commit/37532d46f599a99e0e021b0455a6f02381005dd7))
+- Add prefix to connector_transaction_id ([#2981](https://github.com/juspay/hyperswitch/pull/2981)) ([`107c3b9`](https://github.com/juspay/hyperswitch/commit/107c3b99417dd7bca7b62741ad601485700f37be))
+
+### Refactors
+
+- **connector:** [Nuvei] update error message ([#2867](https://github.com/juspay/hyperswitch/pull/2867)) ([`04b7c03`](https://github.com/juspay/hyperswitch/commit/04b7c0384dc9290bd60f49033fd35732527720f1))
+
+### Testing
+
+- **postman:** Update postman collection files ([`aee59e0`](https://github.com/juspay/hyperswitch/commit/aee59e088a8e7c1b81aca1015c90c7b4fd07511d))
+
+### Documentation
+
+- **try_local_system:** Add instructions to run using Docker Compose by pulling standalone images ([#2984](https://github.com/juspay/hyperswitch/pull/2984)) ([`0fa8ad1`](https://github.com/juspay/hyperswitch/commit/0fa8ad1b7c27010bf83e4035de9881d29e192e8a))
+
+### Miscellaneous Tasks
+
+- **connector:** Update connector addition script ([#2801](https://github.com/juspay/hyperswitch/pull/2801)) ([`34953a0`](https://github.com/juspay/hyperswitch/commit/34953a046429fe0341e8469bd9b036e176bda205))
+
+**Full Changelog:** [`v1.89.0...v1.90.0`](https://github.com/juspay/hyperswitch/compare/v1.89.0...v1.90.0)
+
+- - -
+
+
+## 1.89.0 (2023-11-24)
+
+### Features
+
+- **router:** Add `connector_transaction_id` in error_response from connector flows ([#2972](https://github.com/juspay/hyperswitch/pull/2972)) ([`3322103`](https://github.com/juspay/hyperswitch/commit/3322103f5c9b7c2a5b663980246c6ca36b8dc63e))
+
+### Bug Fixes
+
+- **connector:** [BANKOFAMERICA] Add status VOIDED in enum Bankofameri… ([#2969](https://github.com/juspay/hyperswitch/pull/2969)) ([`203bbd7`](https://github.com/juspay/hyperswitch/commit/203bbd73751e1513206e81d7cf920ec263f83c58))
+- **core:** Error propagation for not supporting partial refund ([#2976](https://github.com/juspay/hyperswitch/pull/2976)) ([`97a38a7`](https://github.com/juspay/hyperswitch/commit/97a38a78e514e4fa3b5db46b6de985be6312dcc3))
+- **router:** Mark refund status as failure for not_implemented error from connector flows ([#2978](https://github.com/juspay/hyperswitch/pull/2978)) ([`d56d805`](https://github.com/juspay/hyperswitch/commit/d56d80557050336d5ed37282f1aa34b6c17389d1))
+- Return none instead of err when payment method data is not found for bank debit during listing ([#2967](https://github.com/juspay/hyperswitch/pull/2967)) ([`5cc829a`](https://github.com/juspay/hyperswitch/commit/5cc829a11f515a413fe19f657a90aa05cebb99b5))
+- Surcharge related status and rules fix ([#2974](https://github.com/juspay/hyperswitch/pull/2974)) ([`3db7213`](https://github.com/juspay/hyperswitch/commit/3db721388a7f0e291d7eb186661fc69a57068ea6))
+
+### Documentation
+
+- **README:** Updated Community Platform Mentions ([#2960](https://github.com/juspay/hyperswitch/pull/2960)) ([`e0bde43`](https://github.com/juspay/hyperswitch/commit/e0bde433282a34eb9eb28a2d9c43c2b17b5e65e5))
+- Add Rust locker information in architecture doc ([#2964](https://github.com/juspay/hyperswitch/pull/2964)) ([`b2f7dd1`](https://github.com/juspay/hyperswitch/commit/b2f7dd13925a1429e316cd9eaf0e2d31d46b6d4a))
+
+**Full Changelog:** [`v1.88.0...v1.89.0`](https://github.com/juspay/hyperswitch/compare/v1.88.0...v1.89.0)
+
+- - -
+
+
+## 1.88.0 (2023-11-23)
+
+### Features
+
+- **connector:** [BANKOFAMERICA] Implement Google Pay ([#2940](https://github.com/juspay/hyperswitch/pull/2940)) ([`f91d4ae`](https://github.com/juspay/hyperswitch/commit/f91d4ae11b02def92c1dde743a0c01b5aac5703f))
+- **router:** Allow billing and shipping address update in payments confirm flow ([#2963](https://github.com/juspay/hyperswitch/pull/2963)) ([`59ef162`](https://github.com/juspay/hyperswitch/commit/59ef162219db3e4650dde65710850bc9f3280530))
+
+### Bug Fixes
+
+- **connector:** [Prophetpay] Use refund_id as reference_id for Refund ([#2966](https://github.com/juspay/hyperswitch/pull/2966)) ([`dd3e22a`](https://github.com/juspay/hyperswitch/commit/dd3e22a938714f373477e08d1d25e4b84ac796c6))
+- **core:** Fix Default Values Enum FieldType ([#2934](https://github.com/juspay/hyperswitch/pull/2934)) ([`35a44ed`](https://github.com/juspay/hyperswitch/commit/35a44ed2533b748e3fabb8a2f8db4fa7e5d3cf7e))
+- **drainer:** Increase jobs picked only when stream is not empty ([#2958](https://github.com/juspay/hyperswitch/pull/2958)) ([`42eedf3`](https://github.com/juspay/hyperswitch/commit/42eedf3a8c2e62fc22bcead370d129ebaf11a00b))
+- Amount_captured goes to 0 for 3ds payments ([#2954](https://github.com/juspay/hyperswitch/pull/2954)) ([`75eea7e`](https://github.com/juspay/hyperswitch/commit/75eea7e81787f2e0697b930b82a8188193f8d51f))
+- Make drainer sleep on every loop interval instead of cycle end ([#2951](https://github.com/juspay/hyperswitch/pull/2951)) ([`e8df690`](https://github.com/juspay/hyperswitch/commit/e8df69092f4c6acee58109aaff2a9454fceb571a))
+
+### Refactors
+
+- **connector:**
+ - [Payeezy] update error message ([#2919](https://github.com/juspay/hyperswitch/pull/2919)) ([`cb65370`](https://github.com/juspay/hyperswitch/commit/cb653706066b889eaa9423a6227ce1df954b4759))
+ - [Worldline] change error message from NotSupported to NotImplemented ([#2893](https://github.com/juspay/hyperswitch/pull/2893)) ([`e721b06`](https://github.com/juspay/hyperswitch/commit/e721b06c7077e00458450a4fb98f4497e8227dc6))
+
+### Testing
+
+- **postman:** Update postman collection files ([`9a3fa00`](https://github.com/juspay/hyperswitch/commit/9a3fa00426d74f6d18b3c712b292d98d80d517ba))
+
+**Full Changelog:** [`v1.87.0...v1.88.0`](https://github.com/juspay/hyperswitch/compare/v1.87.0...v1.88.0)
+
+- - -
+
+
+## 1.87.0 (2023-11-22)
+
+### Features
+
+- **api_event_errors:** Error field in APIEvents ([#2808](https://github.com/juspay/hyperswitch/pull/2808)) ([`ce10579`](https://github.com/juspay/hyperswitch/commit/ce10579a729fe4a7d4ab9f1a4cbd38c3ca00e90b))
+- **payment_methods:** Add support for tokenising bank details and fetching masked details while listing ([#2585](https://github.com/juspay/hyperswitch/pull/2585)) ([`9989489`](https://github.com/juspay/hyperswitch/commit/998948953ab8a444aca79957f48e7cfb3066c334))
+- **router:**
+ - Migrate `payment_method_data` to rust locker only if `payment_method` is card ([#2929](https://github.com/juspay/hyperswitch/pull/2929)) ([`f8261a9`](https://github.com/juspay/hyperswitch/commit/f8261a96e758498a32c988191bf314aa6c752059))
+ - Add list payment link support ([#2805](https://github.com/juspay/hyperswitch/pull/2805)) ([`b441a1f`](https://github.com/juspay/hyperswitch/commit/b441a1f2f9d9d84601cf78a6e39145e8fb847593))
+- **routing:** Routing prometheus metrics ([#2870](https://github.com/juspay/hyperswitch/pull/2870)) ([`4e15d77`](https://github.com/juspay/hyperswitch/commit/4e15d7792e3167de170c3d8310f33419f4dfb0db))
+
+### Bug Fixes
+
+- cybersource mandates and fiserv exp year ([#2920](https://github.com/juspay/hyperswitch/pull/2920)) ([`7f74ae9`](https://github.com/juspay/hyperswitch/commit/7f74ae98a1d48eed98341e4505d3801a61e69fc7))
+- Kv logs when KeyNotSet is returned ([#2928](https://github.com/juspay/hyperswitch/pull/2928)) ([`6954de7`](https://github.com/juspay/hyperswitch/commit/6954de77a0fda14d87b79ec7ceee7cc8f1c491db))
+
+### Refactors
+
+- **macros:** Use syn2.0 ([#2890](https://github.com/juspay/hyperswitch/pull/2890)) ([`46e13d5`](https://github.com/juspay/hyperswitch/commit/46e13d54759168ad7667af08d5481ab510e5706a))
+- **mca:** Add Serialization for `ConnectorAuthType` ([#2945](https://github.com/juspay/hyperswitch/pull/2945)) ([`341374b`](https://github.com/juspay/hyperswitch/commit/341374b8e5eced329587b93cbb6bd58e16dd9932))
+
+### Testing
+
+- **postman:** Update postman collection files ([`b96052f`](https://github.com/juspay/hyperswitch/commit/b96052f9c64dd6e49d52ba8befd1f60a843b482a))
+
+### Documentation
+
+- **README:** Update feature support link ([#2894](https://github.com/juspay/hyperswitch/pull/2894)) ([`7d223ee`](https://github.com/juspay/hyperswitch/commit/7d223ee0d1b53c02421ed6bd1b5584362d7a7456))
+
+### Miscellaneous Tasks
+
+- Address Rust 1.74 clippy lints ([#2942](https://github.com/juspay/hyperswitch/pull/2942)) ([`c6a5a85`](https://github.com/juspay/hyperswitch/commit/c6a5a8574825dc333602f4f1cee7e26969eab030))
+
+**Full Changelog:** [`v1.86.0...v1.87.0`](https://github.com/juspay/hyperswitch/compare/v1.86.0...v1.87.0)
+
+- - -
+
+
+## 1.86.0 (2023-11-21)
+
+### Features
+
+- **connector:** [Prophetpay] Save card token for Refund and remove Void flow ([#2927](https://github.com/juspay/hyperswitch/pull/2927)) ([`15a255e`](https://github.com/juspay/hyperswitch/commit/15a255ea60dffad9e4cf20d642636028c27c7c00))
+- Add support for 3ds and surcharge decision through routing rules ([#2869](https://github.com/juspay/hyperswitch/pull/2869)) ([`f8618e0`](https://github.com/juspay/hyperswitch/commit/f8618e077065d94aa27d7153fc5ea6f93870bd81))
+
+### Bug Fixes
+
+- **mca:** Change the check for `disabled` field in mca create and update ([#2938](https://github.com/juspay/hyperswitch/pull/2938)) ([`e66ccde`](https://github.com/juspay/hyperswitch/commit/e66ccde4cf6d055b7d02c5e982d2e09364845602))
+- Status goes from pending to partially captured in psync ([#2915](https://github.com/juspay/hyperswitch/pull/2915)) ([`3f3b797`](https://github.com/juspay/hyperswitch/commit/3f3b797dc65c1bc6f710b122ef00d5bcb409e600))
+
+### Testing
+
+- **postman:** Update postman collection files ([`245e489`](https://github.com/juspay/hyperswitch/commit/245e489d13209da19d6e9af01219056eec04e897))
+
+**Full Changelog:** [`v1.85.0...v1.86.0`](https://github.com/juspay/hyperswitch/compare/v1.85.0...v1.86.0)
+
+- - -
+
+
+## 1.85.0 (2023-11-21)
+
+### Features
+
+- **mca:** Add new `auth_type` and a status field for mca ([#2883](https://github.com/juspay/hyperswitch/pull/2883)) ([`25cef38`](https://github.com/juspay/hyperswitch/commit/25cef386b8876b43893f20b93cd68ece6e68412d))
+- **router:** Add unified_code, unified_message in payments response ([#2918](https://github.com/juspay/hyperswitch/pull/2918)) ([`3954001`](https://github.com/juspay/hyperswitch/commit/39540015fde476ad8492a9142c2c1bfda8444a27))
+
+### Bug Fixes
+
+- **connector:**
+ - [fiserv] fix metadata deserialization in merchant_connector_account ([#2746](https://github.com/juspay/hyperswitch/pull/2746)) ([`644709d`](https://github.com/juspay/hyperswitch/commit/644709d95f6ecaab497cf0cf3788b9e2ed88b855))
+ - [CASHTOCODE] Fix Error Response Handling ([#2926](https://github.com/juspay/hyperswitch/pull/2926)) ([`938b63a`](https://github.com/juspay/hyperswitch/commit/938b63a1fceb87b4aae4211dac4d051e024028b1))
+- **router:** Associate parent payment token with `payment_method_id` as hyperswitch token for saved cards ([#2130](https://github.com/juspay/hyperswitch/pull/2130)) ([`efeebc0`](https://github.com/juspay/hyperswitch/commit/efeebc0f2365f0900de3dd3e10a1539621c9933d))
+- Api lock on PaymentsCreate ([#2916](https://github.com/juspay/hyperswitch/pull/2916)) ([`cfabfa6`](https://github.com/juspay/hyperswitch/commit/cfabfa60db4d275066be72ee64153a34d38f13b8))
+- Merchant_connector_id null in KV flow ([#2810](https://github.com/juspay/hyperswitch/pull/2810)) ([`e566a4e`](https://github.com/juspay/hyperswitch/commit/e566a4eff2270c2a56ec90966f42ccfd79906068))
+
+### Refactors
+
+- **connector:** [Paypal] Add support for both BodyKey and SignatureKey ([#2633](https://github.com/juspay/hyperswitch/pull/2633)) ([`d8fcd3c`](https://github.com/juspay/hyperswitch/commit/d8fcd3c9712480c1230590c4f23b35da79df784d))
+- **core:** Query business profile only once ([#2830](https://github.com/juspay/hyperswitch/pull/2830)) ([`44deeb7`](https://github.com/juspay/hyperswitch/commit/44deeb7e7605cb5320b84c0fac1fd551877803a4))
+- **payment_methods:** Added support for pm_auth_connector field in pm list response ([#2667](https://github.com/juspay/hyperswitch/pull/2667)) ([`be4aa3b`](https://github.com/juspay/hyperswitch/commit/be4aa3b913819698c6c22ddedafe1d90fbe02add))
+- Add mapping for ConnectorError in payouts flow ([#2608](https://github.com/juspay/hyperswitch/pull/2608)) ([`5c4e7c9`](https://github.com/juspay/hyperswitch/commit/5c4e7c9031f62d63af35da2dcab79eac948e7dbb))
+
+### Testing
+
+- **postman:** Update postman collection files ([`ce725ef`](https://github.com/juspay/hyperswitch/commit/ce725ef8c680eea3fe03671c989fd4572cfc0640))
+
+**Full Changelog:** [`v1.84.0...v1.85.0`](https://github.com/juspay/hyperswitch/compare/v1.84.0...v1.85.0)
+
+- - -
+
+
+## 1.84.0 (2023-11-17)
+
+### Features
+
+- **connector:** [BANKOFAMERICA] PSYNC Bugfix ([#2897](https://github.com/juspay/hyperswitch/pull/2897)) ([`bdcc138`](https://github.com/juspay/hyperswitch/commit/bdcc138e8d84577fc99f9a9aef3484b66f98209a))
+
+**Full Changelog:** [`v1.83.1...v1.84.0`](https://github.com/juspay/hyperswitch/compare/v1.83.1...v1.84.0)
+
+- - -
+
+
+## 1.83.1 (2023-11-17)
+
+### Bug Fixes
+
+- **router:** Add choice to use the appropriate key for jws verification ([#2917](https://github.com/juspay/hyperswitch/pull/2917)) ([`606daa9`](https://github.com/juspay/hyperswitch/commit/606daa9367cac8c2ea926313019deab2f938b591))
+
+**Full Changelog:** [`v1.83.0...v1.83.1`](https://github.com/juspay/hyperswitch/compare/v1.83.0...v1.83.1)
+
+- - -
+
+
+## 1.83.0 (2023-11-17)
+
+### Features
+
+- **events:** Add incoming webhook payload to api events logger ([#2852](https://github.com/juspay/hyperswitch/pull/2852)) ([`aea390a`](https://github.com/juspay/hyperswitch/commit/aea390a6a1c331f8e0dbea4f41218e43f7323508))
+- **router:** Custom payment link config for payment create ([#2741](https://github.com/juspay/hyperswitch/pull/2741)) ([`c39beb2`](https://github.com/juspay/hyperswitch/commit/c39beb2501e63bbf7fd41bbc947280d7ff5a71dc))
+
+### Bug Fixes
+
+- **router:** Add rust locker url in proxy_bypass_urls ([#2902](https://github.com/juspay/hyperswitch/pull/2902)) ([`9a201ae`](https://github.com/juspay/hyperswitch/commit/9a201ae698c2cf52e617660f82d5bf1df2e797ae))
+
+### Documentation
+
+- **README:** Replace cloudformation deployment template with latest s3 url. ([#2891](https://github.com/juspay/hyperswitch/pull/2891)) ([`375108b`](https://github.com/juspay/hyperswitch/commit/375108b6df50e041fc9dbeb35a6a6b46b146037a))
+
+**Full Changelog:** [`v1.82.0...v1.83.0`](https://github.com/juspay/hyperswitch/compare/v1.82.0...v1.83.0)
+
+- - -
+
+
+## 1.82.0 (2023-11-17)
+
+### Features
+
+- **router:** Add fallback while add card and retrieve card from rust locker ([#2888](https://github.com/juspay/hyperswitch/pull/2888)) ([`f735fb0`](https://github.com/juspay/hyperswitch/commit/f735fb0551812fd781a2db8bac5a0deef4cabb2b))
+
+### Bug Fixes
+
+- **core:** Introduce new attempt and intent status to handle multiple partial captures ([#2802](https://github.com/juspay/hyperswitch/pull/2802)) ([`cb88be0`](https://github.com/juspay/hyperswitch/commit/cb88be01f22725948648976c2a5606a03b5ce92a))
+
+### Testing
+
+- **postman:** Update postman collection files ([`7d05b74`](https://github.com/juspay/hyperswitch/commit/7d05b74b950d9e078b063e17d046cbeb501d006a))
+
+**Full Changelog:** [`v1.81.0...v1.82.0`](https://github.com/juspay/hyperswitch/compare/v1.81.0...v1.82.0)
+
+- - -
+
+
+## 1.81.0 (2023-11-16)
+
+### Features
+
+- **connector:**
+ - [BANKOFAMERICA] Implement Cards for Bank of America ([#2765](https://github.com/juspay/hyperswitch/pull/2765)) ([`e8de3a7`](https://github.com/juspay/hyperswitch/commit/e8de3a710710b92f5c2351c5d67c22352c2b0a30))
+ - [ProphetPay] Implement Card Redirect PaymentMethodType and flows for Authorize, CompleteAuthorize, Psync, Refund, Rsync and Void ([#2641](https://github.com/juspay/hyperswitch/pull/2641)) ([`8d4adc5`](https://github.com/juspay/hyperswitch/commit/8d4adc52af57ed0994e6efbb5b2d0d3df3fb3150))
+
+### Testing
+
+- **postman:** Update postman collection files ([`f829197`](https://github.com/juspay/hyperswitch/commit/f8291973c38bde874c45ca15ff8d48c1f2de9781))
+
+**Full Changelog:** [`v1.80.0...v1.81.0`](https://github.com/juspay/hyperswitch/compare/v1.80.0...v1.81.0)
+
+- - -
+
+
+## 1.80.0 (2023-11-16)
+
+### Features
+
+- **router:** Add api to migrate card from basilisk to rust ([#2853](https://github.com/juspay/hyperswitch/pull/2853)) ([`b8b20c4`](https://github.com/juspay/hyperswitch/commit/b8b20c412df0485bf395f9aa21e6e34e90d97acd))
+- Spawn webhooks and async scheduling in background ([#2780](https://github.com/juspay/hyperswitch/pull/2780)) ([`f248fe2`](https://github.com/juspay/hyperswitch/commit/f248fe2889c9cb68af4464ab0db1735224ab5c8d))
+
+### Refactors
+
+- **router:** Add openapi spec support for gsm apis ([#2871](https://github.com/juspay/hyperswitch/pull/2871)) ([`62c9cca`](https://github.com/juspay/hyperswitch/commit/62c9ccae6ab0d128c54962675b88739ad7797fe6))
+
+**Full Changelog:** [`v1.79.0...v1.80.0`](https://github.com/juspay/hyperswitch/compare/v1.79.0...v1.80.0)
+
+- - -
+
+
+## 1.79.0 (2023-11-16)
+
+### Features
+
+- Change async-bb8 fork and tokio spawn for concurrent database calls ([#2774](https://github.com/juspay/hyperswitch/pull/2774)) ([`d634fde`](https://github.com/juspay/hyperswitch/commit/d634fdeac349b92e3619234580299a6c6c38e6d4))
+
+### Bug Fixes
+
+- **connector:** [noon] add validate psync reference ([#2886](https://github.com/juspay/hyperswitch/pull/2886)) ([`b129023`](https://github.com/juspay/hyperswitch/commit/b1290234ba13de2dd8cc4210f63bae514c2988b4))
+- **payment_link:** Render SDK for status requires_payment_method ([#2887](https://github.com/juspay/hyperswitch/pull/2887)) ([`d4d2c2c`](https://github.com/juspay/hyperswitch/commit/d4d2c2c7076a46996aa0aa74d1df827169f73155))
+- Paypal postman collection changes for surcharge feature ([#2884](https://github.com/juspay/hyperswitch/pull/2884)) ([`5956242`](https://github.com/juspay/hyperswitch/commit/5956242588ef7bdbaa1804a952d48dc47c6e15f1))
+
+### Testing
+
+- **postman:** Update postman collection files ([`5c31365`](https://github.com/juspay/hyperswitch/commit/5c313656a129362b0e905e5fbf349dbbec57199c))
+
+**Full Changelog:** [`v1.78.0...v1.79.0`](https://github.com/juspay/hyperswitch/compare/v1.78.0...v1.79.0)
+
+- - -
+
+
## 1.78.0 (2023-11-14)
### Features
diff --git a/Cargo.lock b/Cargo.lock
index 1574933810b3..e8719b29f51d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -9,12 +9,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "617a8268e3537fe1d8c9ead925fca49ef6400927ee7bc26750e90ecee14ce4b8"
dependencies = [
"bitflags 1.3.2",
- "bytes",
+ "bytes 1.5.0",
"futures-core",
"futures-sink",
"memchr",
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
"tokio-util",
"tracing",
]
@@ -31,7 +31,7 @@ dependencies = [
"futures-util",
"log",
"once_cell",
- "smallvec",
+ "smallvec 1.11.1",
]
[[package]]
@@ -48,7 +48,7 @@ dependencies = [
"base64 0.21.4",
"bitflags 1.3.2",
"brotli",
- "bytes",
+ "bytes 1.5.0",
"bytestring",
"derive_more",
"encoding_rs",
@@ -66,8 +66,8 @@ dependencies = [
"pin-project-lite",
"rand 0.8.5",
"sha1",
- "smallvec",
- "tokio",
+ "smallvec 1.11.1",
+ "tokio 1.32.0",
"tokio-util",
"tracing",
"zstd",
@@ -92,7 +92,7 @@ dependencies = [
"actix-multipart-derive",
"actix-utils",
"actix-web",
- "bytes",
+ "bytes 1.5.0",
"derive_more",
"futures-core",
"futures-util",
@@ -105,7 +105,7 @@ dependencies = [
"serde_json",
"serde_plain",
"tempfile",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -114,7 +114,7 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a0a77f836d869f700e5b47ac7c3c8b9c8bc82e4aec861954c6198abee3ebd4d"
dependencies = [
- "darling 0.20.3",
+ "darling",
"parse-size",
"proc-macro2",
"quote",
@@ -142,7 +142,7 @@ checksum = "28f32d40287d3f402ae0028a9d54bef51af15c8769492826a69d28f81893151d"
dependencies = [
"actix-macros",
"futures-core",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -156,9 +156,9 @@ dependencies = [
"actix-utils",
"futures-core",
"futures-util",
- "mio",
+ "mio 0.8.8",
"socket2 0.5.4",
- "tokio",
+ "tokio 1.32.0",
"tracing",
]
@@ -188,7 +188,7 @@ dependencies = [
"pin-project-lite",
"rustls 0.21.7",
"rustls-webpki",
- "tokio",
+ "tokio 1.32.0",
"tokio-rustls",
"tokio-util",
"tracing",
@@ -221,9 +221,9 @@ dependencies = [
"actix-utils",
"actix-web-codegen",
"ahash 0.7.6",
- "bytes",
+ "bytes 1.5.0",
"bytestring",
- "cfg-if",
+ "cfg-if 1.0.0",
"cookie",
"derive_more",
"encoding_rs",
@@ -240,7 +240,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded",
- "smallvec",
+ "smallvec 1.11.1",
"socket2 0.4.9",
"time",
"url",
@@ -296,7 +296,7 @@ version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"getrandom 0.2.10",
"once_cell",
"version_check",
@@ -332,6 +332,36 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+[[package]]
+name = "analytics"
+version = "0.1.0"
+dependencies = [
+ "actix-web",
+ "api_models",
+ "async-trait",
+ "aws-config",
+ "aws-sdk-lambda",
+ "aws-smithy-types",
+ "bigdecimal",
+ "common_utils",
+ "diesel_models",
+ "error-stack",
+ "external_services",
+ "futures 0.3.28",
+ "masking",
+ "once_cell",
+ "reqwest",
+ "router_env",
+ "serde",
+ "serde_json",
+ "sqlx",
+ "storage_impl",
+ "strum 0.25.0",
+ "thiserror",
+ "time",
+ "tokio 1.32.0",
+]
+
[[package]]
name = "android-tzdata"
version = "0.1.1"
@@ -381,7 +411,7 @@ dependencies = [
"router_derive",
"serde",
"serde_json",
- "strum 0.24.1",
+ "strum 0.25.0",
"time",
"url",
"utoipa",
@@ -475,14 +505,14 @@ dependencies = [
[[package]]
name = "async-bb8-diesel"
version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "779f1fa3defe66bf147fe5c811b23a02cfcaa528a25293e0b20d1911eac1fb05"
+source = "git+https://github.com/jarnura/async-bb8-diesel?rev=53b4ab901aab7635c8215fd1c2d542c8db443094#53b4ab901aab7635c8215fd1c2d542c8db443094"
dependencies = [
"async-trait",
"bb8",
"diesel",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
+ "tracing",
]
[[package]]
@@ -506,7 +536,7 @@ dependencies = [
"futures-core",
"memchr",
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -517,7 +547,7 @@ checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
dependencies = [
"async-lock",
"autocfg",
- "cfg-if",
+ "cfg-if 1.0.0",
"concurrent-queue",
"futures-lite",
"log",
@@ -606,8 +636,8 @@ dependencies = [
"actix-utils",
"ahash 0.7.6",
"base64 0.21.4",
- "bytes",
- "cfg-if",
+ "bytes 1.5.0",
+ "cfg-if 1.0.0",
"cookie",
"derive_more",
"futures-core",
@@ -624,7 +654,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -644,14 +674,14 @@ dependencies = [
"aws-smithy-json",
"aws-smithy-types",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
"fastrand 1.9.0",
"hex",
"http",
"hyper",
"ring",
"time",
- "tokio",
+ "tokio 1.32.0",
"tower",
"tracing",
"zeroize",
@@ -666,7 +696,7 @@ dependencies = [
"aws-smithy-async",
"aws-smithy-types",
"fastrand 1.9.0",
- "tokio",
+ "tokio 1.32.0",
"tracing",
"zeroize",
]
@@ -695,7 +725,7 @@ dependencies = [
"aws-smithy-http",
"aws-smithy-types",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
"http",
"http-body",
"lazy_static",
@@ -721,7 +751,32 @@ dependencies = [
"aws-smithy-json",
"aws-smithy-types",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
+ "http",
+ "regex",
+ "tokio-stream",
+ "tower",
+ "tracing",
+]
+
+[[package]]
+name = "aws-sdk-lambda"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3ad176ffaa3aafa532246eb6a9f18a7d68da19950704ecc95d33d9dc3c62a9b"
+dependencies = [
+ "aws-credential-types",
+ "aws-endpoint",
+ "aws-http",
+ "aws-sig-auth",
+ "aws-smithy-async",
+ "aws-smithy-client",
+ "aws-smithy-http",
+ "aws-smithy-http-tower",
+ "aws-smithy-json",
+ "aws-smithy-types",
+ "aws-types",
+ "bytes 1.5.0",
"http",
"regex",
"tokio-stream",
@@ -750,7 +805,7 @@ dependencies = [
"aws-smithy-types",
"aws-smithy-xml",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
"http",
"http-body",
"once_cell",
@@ -779,7 +834,7 @@ dependencies = [
"aws-smithy-json",
"aws-smithy-types",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
"http",
"regex",
"tokio-stream",
@@ -804,7 +859,7 @@ dependencies = [
"aws-smithy-json",
"aws-smithy-types",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
"http",
"regex",
"tokio-stream",
@@ -831,7 +886,7 @@ dependencies = [
"aws-smithy-types",
"aws-smithy-xml",
"aws-types",
- "bytes",
+ "bytes 1.5.0",
"http",
"regex",
"tower",
@@ -861,7 +916,7 @@ checksum = "9d2ce6f507be68e968a33485ced670111d1cbad161ddbbab1e313c03d37d8f4c"
dependencies = [
"aws-smithy-eventstream",
"aws-smithy-http",
- "bytes",
+ "bytes 1.5.0",
"form_urlencoded",
"hex",
"hmac",
@@ -882,7 +937,7 @@ checksum = "13bda3996044c202d75b91afeb11a9afae9db9a721c6a7a427410018e286b880"
dependencies = [
"futures-util",
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
"tokio-stream",
]
@@ -894,7 +949,7 @@ checksum = "07ed8b96d95402f3f6b8b57eb4e0e45ee365f78b1a924faf20ff6e97abf1eae6"
dependencies = [
"aws-smithy-http",
"aws-smithy-types",
- "bytes",
+ "bytes 1.5.0",
"crc32c",
"crc32fast",
"hex",
@@ -917,7 +972,7 @@ dependencies = [
"aws-smithy-http",
"aws-smithy-http-tower",
"aws-smithy-types",
- "bytes",
+ "bytes 1.5.0",
"fastrand 1.9.0",
"http",
"http-body",
@@ -926,7 +981,7 @@ dependencies = [
"lazy_static",
"pin-project-lite",
"rustls 0.20.9",
- "tokio",
+ "tokio 1.32.0",
"tower",
"tracing",
]
@@ -938,7 +993,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460c8da5110835e3d9a717c61f5556b20d03c32a1dec57f8fc559b360f733bb8"
dependencies = [
"aws-smithy-types",
- "bytes",
+ "bytes 1.5.0",
"crc32fast",
]
@@ -950,7 +1005,7 @@ checksum = "2b3b693869133551f135e1f2c77cb0b8277d9e3e17feaf2213f735857c4f0d28"
dependencies = [
"aws-smithy-eventstream",
"aws-smithy-types",
- "bytes",
+ "bytes 1.5.0",
"bytes-utils",
"futures-core",
"http",
@@ -960,7 +1015,7 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"pin-utils",
- "tokio",
+ "tokio 1.32.0",
"tokio-util",
"tracing",
]
@@ -973,7 +1028,7 @@ checksum = "3ae4f6c5798a247fac98a867698197d9ac22643596dc3777f0c76b91917616b9"
dependencies = [
"aws-smithy-http",
"aws-smithy-types",
- "bytes",
+ "bytes 1.5.0",
"http",
"http-body",
"pin-project-lite",
@@ -1034,7 +1089,7 @@ dependencies = [
"aws-smithy-http",
"aws-smithy-types",
"http",
- "rustc_version",
+ "rustc_version 0.4.0",
"tracing",
]
@@ -1047,7 +1102,7 @@ dependencies = [
"async-trait",
"axum-core",
"bitflags 1.3.2",
- "bytes",
+ "bytes 1.5.0",
"futures-util",
"http",
"http-body",
@@ -1073,7 +1128,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"
dependencies = [
"async-trait",
- "bytes",
+ "bytes 1.5.0",
"futures-util",
"http",
"http-body",
@@ -1091,7 +1146,7 @@ checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
dependencies = [
"addr2line",
"cc",
- "cfg-if",
+ "cfg-if 1.0.0",
"libc",
"miniz_oxide 0.7.1",
"object",
@@ -1136,7 +1191,7 @@ dependencies = [
"futures-channel",
"futures-util",
"parking_lot 0.12.1",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -1148,6 +1203,7 @@ dependencies = [
"num-bigint",
"num-integer",
"num-traits",
+ "serde",
]
[[package]]
@@ -1186,6 +1242,18 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
+[[package]]
+name = "bitvec"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
+dependencies = [
+ "funty",
+ "radium",
+ "tap",
+ "wyz",
+]
+
[[package]]
name = "blake2"
version = "0.10.6"
@@ -1204,7 +1272,7 @@ dependencies = [
"arrayref",
"arrayvec",
"cc",
- "cfg-if",
+ "cfg-if 1.0.0",
"constant_time_eq",
"digest 0.10.7",
]
@@ -1227,6 +1295,30 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "borsh"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf617fabf5cdbdc92f774bfe5062d870f228b80056d41180797abf48bed4056e"
+dependencies = [
+ "borsh-derive",
+ "cfg_aliases",
+]
+
+[[package]]
+name = "borsh-derive"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f404657a7ea7b5249e36808dff544bc88a28f26e0ac40009f674b7a009d14be3"
+dependencies = [
+ "once_cell",
+ "proc-macro-crate 2.0.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.38",
+ "syn_derive",
+]
+
[[package]]
name = "brotli"
version = "3.4.0"
@@ -1264,6 +1356,28 @@ version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
+[[package]]
+name = "bytecheck"
+version = "0.6.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627"
+dependencies = [
+ "bytecheck_derive",
+ "ptr_meta",
+ "simdutf8",
+]
+
+[[package]]
+name = "bytecheck_derive"
+version = "0.6.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "bytecount"
version = "0.6.4"
@@ -1282,6 +1396,16 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+[[package]]
+name = "bytes"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c"
+dependencies = [
+ "byteorder",
+ "iovec",
+]
+
[[package]]
name = "bytes"
version = "1.5.0"
@@ -1294,7 +1418,7 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e47d3a8076e283f3acd27400535992edb3ba4b5bb72f8891ad8fbe7932a7d4b9"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"either",
]
@@ -1304,7 +1428,7 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "238e4886760d98c4f899360c834fa93e62cf7f721ac3c2da375cbdf4b8679aae"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
]
[[package]]
@@ -1347,7 +1471,7 @@ checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa"
dependencies = [
"camino",
"cargo-platform",
- "semver",
+ "semver 1.0.19",
"serde",
"serde_json",
]
@@ -1360,7 +1484,7 @@ checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a"
dependencies = [
"camino",
"cargo-platform",
- "semver",
+ "semver 1.0.19",
"serde",
"serde_json",
"thiserror",
@@ -1393,12 +1517,24 @@ dependencies = [
"uuid",
]
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "cfg_aliases"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
+
[[package]]
name = "checked_int_cast"
version = "1.0.0"
@@ -1509,6 +1645,15 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
+[[package]]
+name = "cloudabi"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
+dependencies = [
+ "bitflags 1.3.2",
+]
+
[[package]]
name = "color_quant"
version = "1.1.0"
@@ -1532,12 +1677,12 @@ name = "common_utils"
version = "0.1.0"
dependencies = [
"async-trait",
- "bytes",
+ "bytes 1.5.0",
"common_enums",
"diesel",
"error-stack",
"fake",
- "futures",
+ "futures 0.3.28",
"hex",
"http",
"masking",
@@ -1562,7 +1707,7 @@ dependencies = [
"test-case",
"thiserror",
"time",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -1571,7 +1716,7 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400"
dependencies = [
- "crossbeam-utils",
+ "crossbeam-utils 0.8.16",
]
[[package]]
@@ -1674,7 +1819,7 @@ version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8f48d60e5b4d2c53d5c2b1d8a58c849a70ae5e5509b08a48d047e3b65714a74"
dependencies = [
- "rustc_version",
+ "rustc_version 0.4.0",
]
[[package]]
@@ -1683,7 +1828,7 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
]
[[package]]
@@ -1728,8 +1873,19 @@ version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
dependencies = [
- "cfg-if",
- "crossbeam-utils",
+ "cfg-if 1.0.0",
+ "crossbeam-utils 0.8.16",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed"
+dependencies = [
+ "crossbeam-epoch 0.8.2",
+ "crossbeam-utils 0.7.2",
+ "maybe-uninit",
]
[[package]]
@@ -1738,9 +1894,24 @@ version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
dependencies = [
- "cfg-if",
- "crossbeam-epoch",
- "crossbeam-utils",
+ "cfg-if 1.0.0",
+ "crossbeam-epoch 0.9.15",
+ "crossbeam-utils 0.8.16",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
+dependencies = [
+ "autocfg",
+ "cfg-if 0.1.10",
+ "crossbeam-utils 0.7.2",
+ "lazy_static",
+ "maybe-uninit",
+ "memoffset 0.5.6",
+ "scopeguard",
]
[[package]]
@@ -1750,20 +1921,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
dependencies = [
"autocfg",
- "cfg-if",
- "crossbeam-utils",
- "memoffset",
+ "cfg-if 1.0.0",
+ "crossbeam-utils 0.8.16",
+ "memoffset 0.9.0",
"scopeguard",
]
+[[package]]
+name = "crossbeam-queue"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570"
+dependencies = [
+ "cfg-if 0.1.10",
+ "crossbeam-utils 0.7.2",
+ "maybe-uninit",
+]
+
[[package]]
name = "crossbeam-queue"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
dependencies = [
- "cfg-if",
- "crossbeam-utils",
+ "cfg-if 1.0.0",
+ "crossbeam-utils 0.8.16",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
+dependencies = [
+ "autocfg",
+ "cfg-if 0.1.10",
+ "lazy_static",
]
[[package]]
@@ -1772,7 +1965,7 @@ version = "0.8.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
]
[[package]]
@@ -1786,13 +1979,14 @@ dependencies = [
]
[[package]]
-name = "darling"
-version = "0.14.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850"
+name = "currency_conversion"
+version = "0.1.0"
dependencies = [
- "darling_core 0.14.4",
- "darling_macro 0.14.4",
+ "common_enums",
+ "rust_decimal",
+ "rusty-money",
+ "serde",
+ "thiserror",
]
[[package]]
@@ -1801,22 +1995,8 @@ version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e"
dependencies = [
- "darling_core 0.20.3",
- "darling_macro 0.20.3",
-]
-
-[[package]]
-name = "darling_core"
-version = "0.14.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0"
-dependencies = [
- "fnv",
- "ident_case",
- "proc-macro2",
- "quote",
- "strsim",
- "syn 1.0.109",
+ "darling_core",
+ "darling_macro",
]
[[package]]
@@ -1833,24 +2013,13 @@ dependencies = [
"syn 2.0.38",
]
-[[package]]
-name = "darling_macro"
-version = "0.14.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e"
-dependencies = [
- "darling_core 0.14.4",
- "quote",
- "syn 1.0.109",
-]
-
[[package]]
name = "darling_macro"
version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
dependencies = [
- "darling_core 0.20.3",
+ "darling_core",
"quote",
"syn 2.0.38",
]
@@ -1861,9 +2030,9 @@ version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"hashbrown 0.14.1",
- "lock_api",
+ "lock_api 0.4.10",
"once_cell",
"parking_lot_core 0.9.8",
]
@@ -1900,7 +2069,7 @@ dependencies = [
"deadpool-runtime",
"num_cpus",
"retain_mut",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -1953,7 +2122,7 @@ dependencies = [
"convert_case",
"proc-macro2",
"quote",
- "rustc_version",
+ "rustc_version 0.4.0",
"syn 1.0.109",
]
@@ -2064,7 +2233,7 @@ checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
dependencies = [
"libc",
"redox_users",
- "winapi",
+ "winapi 0.3.9",
]
[[package]]
@@ -2111,7 +2280,7 @@ dependencies = [
"serde_json",
"serde_path_to_error",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -2132,7 +2301,7 @@ version = "0.8.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
]
[[package]]
@@ -2187,7 +2356,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f00447f331c7f726db5b8532ebc9163519eed03c6d7c8b73c90b3ff5646ac85"
dependencies = [
"anyhow",
- "rustc_version",
+ "rustc_version 0.4.0",
"serde",
]
@@ -2226,6 +2395,8 @@ name = "euclid_wasm"
version = "0.1.0"
dependencies = [
"api_models",
+ "common_enums",
+ "currency_conversion",
"euclid",
"getrandom 0.2.10",
"kgraph_utils",
@@ -2251,17 +2422,20 @@ dependencies = [
"aws-config",
"aws-sdk-kms",
"aws-sdk-sesv2",
+ "aws-sdk-sts",
"aws-smithy-client",
"base64 0.21.4",
"common_utils",
"dyn-clone",
"error-stack",
+ "hyper",
+ "hyper-proxy",
"masking",
"once_cell",
"router_env",
"serde",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -2291,7 +2465,7 @@ dependencies = [
"serde",
"serde_json",
"time",
- "tokio",
+ "tokio 1.32.0",
"url",
"webdriver",
]
@@ -2375,19 +2549,19 @@ dependencies = [
"arc-swap",
"arcstr",
"async-trait",
- "bytes",
+ "bytes 1.5.0",
"bytes-utils",
- "cfg-if",
+ "cfg-if 1.0.0",
"float-cmp",
- "futures",
+ "futures 0.3.28",
"lazy_static",
"log",
"parking_lot 0.12.1",
"rand 0.8.5",
"redis-protocol",
- "semver",
+ "semver 1.0.19",
"sha-1 0.10.1",
- "tokio",
+ "tokio 1.32.0",
"tokio-stream",
"tokio-util",
"tracing",
@@ -2447,6 +2621,34 @@ dependencies = [
"syn 2.0.38",
]
+[[package]]
+name = "fuchsia-zircon"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
+dependencies = [
+ "bitflags 1.3.2",
+ "fuchsia-zircon-sys",
+]
+
+[[package]]
+name = "fuchsia-zircon-sys"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+
+[[package]]
+name = "funty"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
+
+[[package]]
+name = "futures"
+version = "0.1.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678"
+
[[package]]
name = "futures"
version = "0.3.28"
@@ -2496,7 +2698,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5"
dependencies = [
"futures-core",
- "lock_api",
+ "lock_api 0.4.10",
"parking_lot 0.11.2",
]
@@ -2594,7 +2796,7 @@ version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
@@ -2605,7 +2807,7 @@ version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"js-sys",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
@@ -2677,7 +2879,7 @@ version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"fnv",
"futures-core",
"futures-sink",
@@ -2685,7 +2887,7 @@ dependencies = [
"http",
"indexmap 1.9.3",
"slab",
- "tokio",
+ "tokio 1.32.0",
"tokio-util",
"tracing",
]
@@ -2724,6 +2926,30 @@ dependencies = [
"hashbrown 0.14.1",
]
+[[package]]
+name = "headers"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270"
+dependencies = [
+ "base64 0.21.4",
+ "bytes 1.5.0",
+ "headers-core",
+ "http",
+ "httpdate",
+ "mime",
+ "sha1",
+]
+
+[[package]]
+name = "headers-core"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
+dependencies = [
+ "http",
+]
+
[[package]]
name = "heck"
version = "0.4.1"
@@ -2769,7 +2995,7 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"fnv",
"itoa",
]
@@ -2780,7 +3006,7 @@ version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"http",
"pin-project-lite",
]
@@ -2833,7 +3059,7 @@ version = "0.14.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"futures-channel",
"futures-core",
"futures-util",
@@ -2845,12 +3071,30 @@ dependencies = [
"itoa",
"pin-project-lite",
"socket2 0.4.9",
- "tokio",
+ "tokio 1.32.0",
"tower-service",
"tracing",
"want",
]
+[[package]]
+name = "hyper-proxy"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca815a891b24fdfb243fa3239c86154392b0953ee584aa1a2a1f66d20cbe75cc"
+dependencies = [
+ "bytes 1.5.0",
+ "futures 0.3.28",
+ "headers",
+ "http",
+ "hyper",
+ "hyper-tls",
+ "native-tls",
+ "tokio 1.32.0",
+ "tokio-native-tls",
+ "tower-service",
+]
+
[[package]]
name = "hyper-rustls"
version = "0.23.2"
@@ -2862,7 +3106,7 @@ dependencies = [
"log",
"rustls 0.20.9",
"rustls-native-certs",
- "tokio",
+ "tokio 1.32.0",
"tokio-rustls",
]
@@ -2874,7 +3118,7 @@ checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
dependencies = [
"hyper",
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
"tokio-io-timeout",
]
@@ -2884,10 +3128,10 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"hyper",
"native-tls",
- "tokio",
+ "tokio 1.32.0",
"tokio-native-tls",
]
@@ -3015,7 +3259,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
]
[[package]]
@@ -3029,6 +3273,15 @@ dependencies = [
"windows-sys",
]
+[[package]]
+name = "iovec"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "ipnet"
version = "2.8.0"
@@ -3140,11 +3393,22 @@ dependencies = [
"simple_asn1",
]
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
+
[[package]]
name = "kgraph_utils"
version = "0.1.0"
dependencies = [
"api_models",
+ "common_enums",
"criterion",
"euclid",
"masking",
@@ -3246,6 +3510,15 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1"
+[[package]]
+name = "lock_api"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75"
+dependencies = [
+ "scopeguard",
+]
+
[[package]]
name = "lock_api"
version = "0.4.10"
@@ -3293,7 +3566,7 @@ dependencies = [
name = "masking"
version = "0.1.0"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"diesel",
"serde",
"serde_json",
@@ -3340,13 +3613,19 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "maybe-uninit"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
+
[[package]]
name = "md-5"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"digest 0.10.7",
]
@@ -3362,6 +3641,15 @@ version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
+[[package]]
+name = "memoffset"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa"
+dependencies = [
+ "autocfg",
+]
+
[[package]]
name = "memoffset"
version = "0.9.0"
@@ -3432,16 +3720,58 @@ dependencies = [
[[package]]
name = "mio"
-version = "0.8.8"
+version = "0.6.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
+checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4"
dependencies = [
+ "cfg-if 0.1.10",
+ "fuchsia-zircon",
+ "fuchsia-zircon-sys",
+ "iovec",
+ "kernel32-sys",
"libc",
"log",
- "wasi 0.11.0+wasi-snapshot-preview1",
+ "miow",
+ "net2",
+ "slab",
+ "winapi 0.2.8",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
+dependencies = [
+ "libc",
+ "log",
+ "wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys",
]
+[[package]]
+name = "mio-uds"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0"
+dependencies = [
+ "iovec",
+ "libc",
+ "mio 0.6.23",
+]
+
+[[package]]
+name = "miow"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d"
+dependencies = [
+ "kernel32-sys",
+ "net2",
+ "winapi 0.2.8",
+ "ws2_32-sys",
+]
+
[[package]]
name = "moka"
version = "0.11.3"
@@ -3451,22 +3781,28 @@ dependencies = [
"async-io",
"async-lock",
"crossbeam-channel",
- "crossbeam-epoch",
- "crossbeam-utils",
+ "crossbeam-epoch 0.9.15",
+ "crossbeam-utils 0.8.16",
"futures-util",
"once_cell",
"parking_lot 0.12.1",
"quanta",
- "rustc_version",
+ "rustc_version 0.4.0",
"scheduled-thread-pool",
"skeptic",
- "smallvec",
+ "smallvec 1.11.1",
"tagptr",
"thiserror",
"triomphe",
"uuid",
]
+[[package]]
+name = "mutually_exclusive_features"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e"
+
[[package]]
name = "nanoid"
version = "0.4.0"
@@ -3494,6 +3830,17 @@ dependencies = [
"tempfile",
]
+[[package]]
+name = "net2"
+version = "0.2.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac"
+dependencies = [
+ "cfg-if 0.1.10",
+ "libc",
+ "winapi 0.3.9",
+]
+
[[package]]
name = "nom"
version = "7.1.3"
@@ -3511,7 +3858,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
- "winapi",
+ "winapi 0.3.9",
]
[[package]]
@@ -3577,6 +3924,27 @@ dependencies = [
"libc",
]
+[[package]]
+name = "num_enum"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9"
+dependencies = [
+ "num_enum_derive",
+]
+
+[[package]]
+name = "num_enum_derive"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799"
+dependencies = [
+ "proc-macro-crate 1.3.1",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "object"
version = "0.32.1"
@@ -3621,12 +3989,12 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "openssl"
-version = "0.10.57"
+version = "0.10.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
+checksum = "79a4c6c3a2b158f7f8f2a2fc5a969fa3a068df6fc9dbb4a43845436e3af7c800"
dependencies = [
"bitflags 2.4.0",
- "cfg-if",
+ "cfg-if 1.0.0",
"foreign-types",
"libc",
"once_cell",
@@ -3653,9 +4021,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.93"
+version = "0.9.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d"
+checksum = "3812c071ba60da8b5677cc12bcb1d42989a65553772897a7e0355545a819838f"
dependencies = [
"cc",
"libc",
@@ -3680,14 +4048,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8af72d59a4484654ea8eb183fea5ae4eb6a41d7ac3e3bae5f4d2a282a3a7d3ca"
dependencies = [
"async-trait",
- "futures",
+ "futures 0.3.28",
"futures-util",
"http",
"opentelemetry",
"opentelemetry-proto",
"prost",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
"tonic",
]
@@ -3697,7 +4065,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "045f8eea8c0fa19f7d48e7bc3128a39c2e5c533d5c61298c548dfefc1064474c"
dependencies = [
- "futures",
+ "futures 0.3.28",
"futures-util",
"opentelemetry",
"prost",
@@ -3738,7 +4106,7 @@ dependencies = [
"percent-encoding",
"rand 0.8.5",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
"tokio-stream",
]
@@ -3770,6 +4138,17 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e52c774a4c39359c1d1c52e43f73dd91a75a614652c825408eec30c95a9b2067"
+[[package]]
+name = "parking_lot"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
+dependencies = [
+ "lock_api 0.3.4",
+ "parking_lot_core 0.6.3",
+ "rustc_version 0.2.3",
+]
+
[[package]]
name = "parking_lot"
version = "0.11.2"
@@ -3777,7 +4156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
- "lock_api",
+ "lock_api 0.4.10",
"parking_lot_core 0.8.6",
]
@@ -3787,22 +4166,37 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
- "lock_api",
+ "lock_api 0.4.10",
"parking_lot_core 0.9.8",
]
+[[package]]
+name = "parking_lot_core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bda66b810a62be75176a80873726630147a5ca780cd33921e0b5709033e66b0a"
+dependencies = [
+ "cfg-if 0.1.10",
+ "cloudabi",
+ "libc",
+ "redox_syscall 0.1.57",
+ "rustc_version 0.2.3",
+ "smallvec 0.6.14",
+ "winapi 0.3.9",
+]
+
[[package]]
name = "parking_lot_core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"instant",
"libc",
"redox_syscall 0.2.16",
- "smallvec",
- "winapi",
+ "smallvec 1.11.1",
+ "winapi 0.3.9",
]
[[package]]
@@ -3811,10 +4205,10 @@ version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"libc",
"redox_syscall 0.3.5",
- "smallvec",
+ "smallvec 1.11.1",
"windows-targets",
]
@@ -4061,7 +4455,7 @@ checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
dependencies = [
"autocfg",
"bitflags 1.3.2",
- "cfg-if",
+ "cfg-if 1.0.0",
"concurrent-queue",
"libc",
"log",
@@ -4084,6 +4478,25 @@ dependencies = [
"vcpkg",
]
+[[package]]
+name = "proc-macro-crate"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
+dependencies = [
+ "once_cell",
+ "toml_edit 0.19.10",
+]
+
+[[package]]
+name = "proc-macro-crate"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8"
+dependencies = [
+ "toml_edit 0.20.2",
+]
+
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@@ -4143,7 +4556,7 @@ version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"prost-derive",
]
@@ -4160,6 +4573,26 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "ptr_meta"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1"
+dependencies = [
+ "ptr_meta_derive",
+]
+
+[[package]]
+name = "ptr_meta_derive"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "pulldown-cmark"
version = "0.9.3"
@@ -4187,14 +4620,14 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab"
dependencies = [
- "crossbeam-utils",
+ "crossbeam-utils 0.8.16",
"libc",
"mach2",
"once_cell",
"raw-cpuid",
"wasi 0.11.0+wasi-snapshot-preview1",
"web-sys",
- "winapi",
+ "winapi 0.3.9",
]
[[package]]
@@ -4233,6 +4666,12 @@ dependencies = [
"scheduled-thread-pool",
]
+[[package]]
+name = "radium"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
+
[[package]]
name = "rand"
version = "0.7.3"
@@ -4338,8 +4777,38 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
dependencies = [
- "crossbeam-deque",
- "crossbeam-utils",
+ "crossbeam-deque 0.8.3",
+ "crossbeam-utils 0.8.16",
+]
+
+[[package]]
+name = "rdkafka"
+version = "0.36.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54f02a5a40220f8a2dfa47ddb38ba9064475a5807a69504b6f91711df2eea63"
+dependencies = [
+ "futures-channel",
+ "futures-util",
+ "libc",
+ "log",
+ "rdkafka-sys",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "slab",
+ "tokio 1.32.0",
+]
+
+[[package]]
+name = "rdkafka-sys"
+version = "4.7.0+2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55e0d2f9ba6253f6ec72385e453294f8618e9e15c2c6aba2a5c01ccf9622d615"
+dependencies = [
+ "libc",
+ "libz-sys",
+ "num_enum",
+ "pkg-config",
]
[[package]]
@@ -4348,7 +4817,7 @@ version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c31deddf734dc0a39d3112e73490e88b61a05e83e074d211f348404cee4d2c6"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"bytes-utils",
"cookie-factory",
"crc16",
@@ -4363,13 +4832,19 @@ dependencies = [
"common_utils",
"error-stack",
"fred",
- "futures",
+ "futures 0.3.28",
"router_env",
"serde",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
]
+[[package]]
+name = "redox_syscall"
+version = "0.1.57"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
+
[[package]]
name = "redox_syscall"
version = "0.2.16"
@@ -4455,6 +4930,15 @@ version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
+[[package]]
+name = "rend"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd"
+dependencies = [
+ "bytecheck",
+]
+
[[package]]
name = "reqwest"
version = "0.11.22"
@@ -4463,7 +4947,7 @@ checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b"
dependencies = [
"async-compression",
"base64 0.21.4",
- "bytes",
+ "bytes 1.5.0",
"encoding_rs",
"futures-core",
"futures-util",
@@ -4485,7 +4969,7 @@ dependencies = [
"serde_json",
"serde_urlencoded",
"system-configuration",
- "tokio",
+ "tokio 1.32.0",
"tokio-native-tls",
"tokio-util",
"tower-service",
@@ -4514,7 +4998,35 @@ dependencies = [
"spin",
"untrusted",
"web-sys",
- "winapi",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rkyv"
+version = "0.7.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58"
+dependencies = [
+ "bitvec",
+ "bytecheck",
+ "hashbrown 0.12.3",
+ "ptr_meta",
+ "rend",
+ "rkyv_derive",
+ "seahash",
+ "tinyvec",
+ "uuid",
+]
+
+[[package]]
+name = "rkyv_derive"
+version = "0.7.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
]
[[package]]
@@ -4550,6 +5062,7 @@ dependencies = [
"actix-multipart",
"actix-rt",
"actix-web",
+ "analytics",
"api_models",
"argon2",
"async-bb8-diesel",
@@ -4561,12 +5074,13 @@ dependencies = [
"bb8",
"bigdecimal",
"blake3",
- "bytes",
+ "bytes 1.5.0",
"cards",
"clap",
"common_enums",
"common_utils",
"config",
+ "currency_conversion",
"data_models",
"derive_deref",
"diesel",
@@ -4574,10 +5088,11 @@ dependencies = [
"digest 0.9.0",
"dyn-clone",
"encoding_rs",
+ "erased-serde",
"error-stack",
"euclid",
"external_services",
- "futures",
+ "futures 0.3.28",
"hex",
"http",
"hyper",
@@ -4597,6 +5112,7 @@ dependencies = [
"qrcode",
"rand 0.8.5",
"rand_chacha 0.3.1",
+ "rdkafka",
"redis_interface",
"regex",
"reqwest",
@@ -4604,6 +5120,7 @@ dependencies = [
"router_derive",
"router_env",
"roxmltree",
+ "rust_decimal",
"rustc-hash",
"scheduler",
"serde",
@@ -4616,12 +5133,13 @@ dependencies = [
"sha-1 0.9.8",
"sqlx",
"storage_impl",
- "strum 0.24.1",
+ "strum 0.25.0",
"tera",
"test_utils",
"thiserror",
"time",
- "tokio",
+ "tokio 1.32.0",
+ "tracing-futures",
"unicode-segmentation",
"url",
"utoipa",
@@ -4636,7 +5154,6 @@ dependencies = [
name = "router_derive"
version = "0.1.0"
dependencies = [
- "darling 0.14.4",
"diesel",
"indexmap 2.0.2",
"proc-macro2",
@@ -4644,7 +5161,7 @@ dependencies = [
"serde",
"serde_json",
"strum 0.24.1",
- "syn 1.0.109",
+ "syn 2.0.38",
]
[[package]]
@@ -4664,7 +5181,7 @@ dependencies = [
"serde_path_to_error",
"strum 0.24.1",
"time",
- "tokio",
+ "tokio 1.32.0",
"tracing",
"tracing-actix-web",
"tracing-appender",
@@ -4724,10 +5241,36 @@ version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"ordered-multimap",
]
+[[package]]
+name = "rust_decimal"
+version = "1.33.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4"
+dependencies = [
+ "arrayvec",
+ "borsh",
+ "bytes 1.5.0",
+ "num-traits",
+ "rand 0.8.5",
+ "rkyv",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "rust_decimal_macros"
+version = "1.33.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e43721f4ef7060ebc2c3ede757733209564ca8207f47674181bcd425dd76945"
+dependencies = [
+ "quote",
+ "rust_decimal",
+]
+
[[package]]
name = "rustc-demangle"
version = "0.1.23"
@@ -4740,13 +5283,22 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+[[package]]
+name = "rustc_version"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
+dependencies = [
+ "semver 0.9.0",
+]
+
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
- "semver",
+ "semver 1.0.19",
]
[[package]]
@@ -4858,6 +5410,16 @@ dependencies = [
"wait-timeout",
]
+[[package]]
+name = "rusty-money"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b28f881005eac7ad8d46b6f075da5f322bd7f4f83a38720fc069694ddadd683"
+dependencies = [
+ "rust_decimal",
+ "rust_decimal_macros",
+]
+
[[package]]
name = "ryu"
version = "1.0.15"
@@ -4900,7 +5462,7 @@ dependencies = [
"diesel_models",
"error-stack",
"external_services",
- "futures",
+ "futures 0.3.28",
"masking",
"once_cell",
"rand 0.8.5",
@@ -4912,7 +5474,7 @@ dependencies = [
"strum 0.24.1",
"thiserror",
"time",
- "tokio",
+ "tokio 1.32.0",
"uuid",
]
@@ -4938,6 +5500,12 @@ dependencies = [
"untrusted",
]
+[[package]]
+name = "seahash"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
+
[[package]]
name = "security-framework"
version = "2.9.2"
@@ -4961,6 +5529,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "semver"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
+dependencies = [
+ "semver-parser",
+]
+
[[package]]
name = "semver"
version = "1.0.19"
@@ -4970,6 +5547,12 @@ dependencies = [
"serde",
]
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+
[[package]]
name = "serde"
version = "1.0.188"
@@ -5109,7 +5692,7 @@ version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c"
dependencies = [
- "darling 0.20.3",
+ "darling",
"proc-macro2",
"quote",
"syn 2.0.38",
@@ -5122,7 +5705,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d"
dependencies = [
"dashmap",
- "futures",
+ "futures 0.3.28",
"lazy_static",
"log",
"parking_lot 0.12.1",
@@ -5147,7 +5730,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
dependencies = [
"block-buffer 0.9.0",
- "cfg-if",
+ "cfg-if 1.0.0",
"cpufeatures",
"digest 0.9.0",
"opaque-debug",
@@ -5159,7 +5742,7 @@ version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"cpufeatures",
"digest 0.10.7",
]
@@ -5170,7 +5753,7 @@ version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"cpufeatures",
"digest 0.10.7",
]
@@ -5181,7 +5764,7 @@ version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"cpufeatures",
"digest 0.10.7",
]
@@ -5232,9 +5815,15 @@ dependencies = [
"futures-core",
"libc",
"signal-hook",
- "tokio",
+ "tokio 1.32.0",
]
+[[package]]
+name = "simdutf8"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a"
+
[[package]]
name = "simple_asn1"
version = "0.6.2"
@@ -5286,6 +5875,15 @@ dependencies = [
"deunicode",
]
+[[package]]
+name = "smallvec"
+version = "0.6.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0"
+dependencies = [
+ "maybe-uninit",
+]
+
[[package]]
name = "smallvec"
version = "1.11.1"
@@ -5299,7 +5897,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
dependencies = [
"libc",
- "winapi",
+ "winapi 0.3.9",
]
[[package]]
@@ -5351,9 +5949,9 @@ dependencies = [
"bigdecimal",
"bitflags 1.3.2",
"byteorder",
- "bytes",
+ "bytes 1.5.0",
"crc",
- "crossbeam-queue",
+ "crossbeam-queue 0.3.8",
"dirs",
"dotenvy",
"either",
@@ -5381,7 +5979,7 @@ dependencies = [
"serde_json",
"sha1",
"sha2",
- "smallvec",
+ "smallvec 1.11.1",
"sqlformat",
"sqlx-rt",
"stringprep",
@@ -5419,7 +6017,7 @@ checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024"
dependencies = [
"native-tls",
"once_cell",
- "tokio",
+ "tokio 1.32.0",
"tokio-native-tls",
]
@@ -5432,7 +6030,7 @@ dependencies = [
"async-bb8-diesel",
"async-trait",
"bb8",
- "bytes",
+ "bytes 1.5.0",
"common_utils",
"config",
"crc32fast",
@@ -5441,7 +6039,7 @@ dependencies = [
"diesel_models",
"dyn-clone",
"error-stack",
- "futures",
+ "futures 0.3.28",
"http",
"masking",
"mime",
@@ -5454,7 +6052,7 @@ dependencies = [
"serde",
"serde_json",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -5555,6 +6153,18 @@ dependencies = [
"unicode-ident",
]
+[[package]]
+name = "syn_derive"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b"
+dependencies = [
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.38",
+]
+
[[package]]
name = "sync_wrapper"
version = "0.1.2"
@@ -5600,13 +6210,19 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
+[[package]]
+name = "tap"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+
[[package]]
name = "tempfile"
version = "3.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"fastrand 2.0.1",
"redox_syscall 0.3.5",
"rustix 0.38.17",
@@ -5650,7 +6266,7 @@ version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54c25e2cb8f5fcd7318157634e8838aa6f7e4715c96637f969fabaccd1ef5462"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"proc-macro-error",
"proc-macro2",
"quote",
@@ -5686,7 +6302,7 @@ dependencies = [
"serial_test",
"thirtyfour",
"time",
- "tokio",
+ "tokio 1.32.0",
"toml 0.7.4",
]
@@ -5701,7 +6317,7 @@ dependencies = [
"chrono",
"cookie",
"fantoccini",
- "futures",
+ "futures 0.3.28",
"http",
"log",
"parking_lot 0.12.1",
@@ -5711,7 +6327,7 @@ dependencies = [
"stringmatch",
"thirtyfour-macros",
"thiserror",
- "tokio",
+ "tokio 1.32.0",
"url",
"urlparse",
]
@@ -5754,7 +6370,7 @@ version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"once_cell",
]
@@ -5821,6 +6437,30 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+[[package]]
+name = "tokio"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.31",
+ "mio 0.6.23",
+ "num_cpus",
+ "tokio-codec",
+ "tokio-current-thread",
+ "tokio-executor",
+ "tokio-fs",
+ "tokio-io",
+ "tokio-reactor",
+ "tokio-sync",
+ "tokio-tcp",
+ "tokio-threadpool",
+ "tokio-timer",
+ "tokio-udp",
+ "tokio-uds",
+]
+
[[package]]
name = "tokio"
version = "1.32.0"
@@ -5828,9 +6468,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9"
dependencies = [
"backtrace",
- "bytes",
+ "bytes 1.5.0",
"libc",
- "mio",
+ "mio 0.8.8",
"num_cpus",
"parking_lot 0.12.1",
"pin-project-lite",
@@ -5840,6 +6480,59 @@ dependencies = [
"windows-sys",
]
+[[package]]
+name = "tokio-codec"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25b2998660ba0e70d18684de5d06b70b70a3a747469af9dea7618cc59e75976b"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.31",
+ "tokio-io",
+]
+
+[[package]]
+name = "tokio-current-thread"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e"
+dependencies = [
+ "futures 0.1.31",
+ "tokio-executor",
+]
+
+[[package]]
+name = "tokio-executor"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671"
+dependencies = [
+ "crossbeam-utils 0.7.2",
+ "futures 0.1.31",
+]
+
+[[package]]
+name = "tokio-fs"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "297a1206e0ca6302a0eed35b700d292b275256f596e2f3fea7729d5e629b6ff4"
+dependencies = [
+ "futures 0.1.31",
+ "tokio-io",
+ "tokio-threadpool",
+]
+
+[[package]]
+name = "tokio-io"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.31",
+ "log",
+]
+
[[package]]
name = "tokio-io-timeout"
version = "1.2.0"
@@ -5847,7 +6540,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf"
dependencies = [
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
]
[[package]]
@@ -5868,7 +6561,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
dependencies = [
"native-tls",
- "tokio",
+ "tokio 1.32.0",
+]
+
+[[package]]
+name = "tokio-reactor"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351"
+dependencies = [
+ "crossbeam-utils 0.7.2",
+ "futures 0.1.31",
+ "lazy_static",
+ "log",
+ "mio 0.6.23",
+ "num_cpus",
+ "parking_lot 0.9.0",
+ "slab",
+ "tokio-executor",
+ "tokio-io",
+ "tokio-sync",
]
[[package]]
@@ -5878,7 +6590,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
dependencies = [
"rustls 0.20.9",
- "tokio",
+ "tokio 1.32.0",
"webpki",
]
@@ -5890,7 +6602,93 @@ checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842"
dependencies = [
"futures-core",
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
+]
+
+[[package]]
+name = "tokio-sync"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee"
+dependencies = [
+ "fnv",
+ "futures 0.1.31",
+]
+
+[[package]]
+name = "tokio-tcp"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "98df18ed66e3b72e742f185882a9e201892407957e45fbff8da17ae7a7c51f72"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.31",
+ "iovec",
+ "mio 0.6.23",
+ "tokio-io",
+ "tokio-reactor",
+]
+
+[[package]]
+name = "tokio-threadpool"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df720b6581784c118f0eb4310796b12b1d242a7eb95f716a8367855325c25f89"
+dependencies = [
+ "crossbeam-deque 0.7.4",
+ "crossbeam-queue 0.2.3",
+ "crossbeam-utils 0.7.2",
+ "futures 0.1.31",
+ "lazy_static",
+ "log",
+ "num_cpus",
+ "slab",
+ "tokio-executor",
+]
+
+[[package]]
+name = "tokio-timer"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296"
+dependencies = [
+ "crossbeam-utils 0.7.2",
+ "futures 0.1.31",
+ "slab",
+ "tokio-executor",
+]
+
+[[package]]
+name = "tokio-udp"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2a0b10e610b39c38b031a2fcab08e4b82f16ece36504988dcbd81dbba650d82"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.31",
+ "log",
+ "mio 0.6.23",
+ "tokio-codec",
+ "tokio-io",
+ "tokio-reactor",
+]
+
+[[package]]
+name = "tokio-uds"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab57a4ac4111c8c9dbcf70779f6fc8bc35ae4b2454809febac840ad19bd7e4e0"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.31",
+ "iovec",
+ "libc",
+ "log",
+ "mio 0.6.23",
+ "mio-uds",
+ "tokio-codec",
+ "tokio-io",
+ "tokio-reactor",
]
[[package]]
@@ -5899,11 +6697,11 @@ version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d"
dependencies = [
- "bytes",
+ "bytes 1.5.0",
"futures-core",
"futures-sink",
"pin-project-lite",
- "tokio",
+ "tokio 1.32.0",
"tracing",
]
@@ -5925,7 +6723,7 @@ dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
- "toml_edit",
+ "toml_edit 0.19.10",
]
[[package]]
@@ -5947,7 +6745,18 @@ dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
- "winnow",
+ "winnow 0.4.11",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
+dependencies = [
+ "indexmap 2.0.2",
+ "toml_datetime",
+ "winnow 0.5.19",
]
[[package]]
@@ -5960,7 +6769,7 @@ dependencies = [
"async-trait",
"axum",
"base64 0.13.1",
- "bytes",
+ "bytes 1.5.0",
"futures-core",
"futures-util",
"h2",
@@ -5972,7 +6781,7 @@ dependencies = [
"pin-project",
"prost",
"prost-derive",
- "tokio",
+ "tokio 1.32.0",
"tokio-stream",
"tokio-util",
"tower",
@@ -5995,7 +6804,7 @@ dependencies = [
"pin-project-lite",
"rand 0.8.5",
"slab",
- "tokio",
+ "tokio 1.32.0",
"tokio-util",
"tower-layer",
"tower-service",
@@ -6020,7 +6829,7 @@ version = "0.1.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"log",
"pin-project-lite",
"tracing-attributes",
@@ -6029,11 +6838,12 @@ dependencies = [
[[package]]
name = "tracing-actix-web"
-version = "0.7.8"
+version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a512ec11fae6c666707625e84f83e5d58f941e9ab15723289c0d380edfe48f09"
+checksum = "1fe0d5feac3f4ca21ba33496bcb1ccab58cca6412b1405ae80f0581541e0ca78"
dependencies = [
"actix-web",
+ "mutually_exclusive_features",
"opentelemetry",
"pin-project",
"tracing",
@@ -6080,6 +6890,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2"
dependencies = [
"pin-project",
+ "tokio 0.1.22",
"tracing",
]
@@ -6131,7 +6942,7 @@ dependencies = [
"serde",
"serde_json",
"sharded-slab",
- "smallvec",
+ "smallvec 1.11.1",
"thread_local",
"tracing",
"tracing-core",
@@ -6389,7 +7200,7 @@ checksum = "8b3c89c2c7e50f33e4d35527e5bf9c11d6d132226dbbd1753f0fbe9f19ef88c6"
dependencies = [
"anyhow",
"git2",
- "rustc_version",
+ "rustc_version 0.4.0",
"rustversion",
"time",
]
@@ -6458,7 +7269,7 @@ version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"wasm-bindgen-macro",
]
@@ -6483,7 +7294,7 @@ version = "0.4.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"js-sys",
"wasm-bindgen",
"web-sys",
@@ -6535,7 +7346,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9973cb72c8587d5ad5efdb91e663d36177dc37725e6c90ca86c626b0cc45c93f"
dependencies = [
"base64 0.13.1",
- "bytes",
+ "bytes 1.5.0",
"cookie",
"http",
"log",
@@ -6582,6 +7393,12 @@ dependencies = [
"web-sys",
]
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+
[[package]]
name = "winapi"
version = "0.3.9"
@@ -6592,6 +7409,12 @@ dependencies = [
"winapi-x86_64-pc-windows-gnu",
]
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
@@ -6604,7 +7427,7 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
dependencies = [
- "winapi",
+ "winapi 0.3.9",
]
[[package]]
@@ -6697,13 +7520,22 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "winnow"
+version = "0.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b"
+dependencies = [
+ "memchr",
+]
+
[[package]]
name = "winreg"
version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
"windows-sys",
]
@@ -6717,7 +7549,7 @@ dependencies = [
"async-trait",
"base64 0.21.4",
"deadpool",
- "futures",
+ "futures 0.3.28",
"futures-timer",
"http-types",
"hyper",
@@ -6726,7 +7558,26 @@ dependencies = [
"regex",
"serde",
"serde_json",
- "tokio",
+ "tokio 1.32.0",
+]
+
+[[package]]
+name = "ws2_32-sys"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
+
+[[package]]
+name = "wyz"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
+dependencies = [
+ "tap",
]
[[package]]
@@ -6775,7 +7626,7 @@ checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
dependencies = [
"byteorder",
"crc32fast",
- "crossbeam-utils",
+ "crossbeam-utils 0.8.16",
"flate2",
]
diff --git a/Dockerfile b/Dockerfile
index 8eb321dd2afd..e9591e5e9f27 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM rust:slim-bookworm as builder
+FROM rust:bookworm as builder
ARG EXTRA_FEATURES=""
@@ -36,7 +36,7 @@ RUN cargo build --release --features release ${EXTRA_FEATURES}
-FROM debian:bookworm-slim
+FROM debian:bookworm
# Placing config and binary executable in different directories
ARG CONFIG_DIR=/local/config
diff --git a/README.md b/README.md
index 129a0512d4a0..db8e820ef142 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,6 @@
-
The open-source payments switch
@@ -35,7 +34,6 @@ The single API to access payment ecosystems across 130+ countries
-
@@ -57,17 +55,14 @@ Using Hyperswitch, you can:
⚡️ Quick Start Guide
- One-click deployment on AWS cloud
+### One-click deployment on AWS cloud
-The fastest and easiest way to try hyperswitch is via our CDK scripts
+The fastest and easiest way to try Hyperswitch is via our CDK scripts
1. Click on the following button for a quick standalone deployment on AWS, suitable for prototyping.
No code or setup is required in your system and the deployment is covered within the AWS free-tier setup.
- Click here if you have not bootstrapped your region before deploying
-
-
-
+
2. Sign-in to your AWS console.
@@ -75,12 +70,27 @@ The fastest and easiest way to try hyperswitch is via our CDK scripts
For an early access to the production-ready setup fill this Early Access Form
+### Run it on your system
+
+You can run Hyperswitch on your system using Docker Compose after cloning this repository:
+
+```shell
+docker compose up -d
+```
+
+This will start the payments router, the primary component within Hyperswitch.
+
+Check out the [local setup guide][local-setup-guide] for a more comprehensive
+setup, which includes the [scheduler and monitoring services][docker-compose-scheduler-monitoring].
+
+[local-setup-guide]: /docs/try_local_system.md
+[docker-compose-scheduler-monitoring]: /docs/try_local_system.md#run-the-scheduler-and-monitoring-services
+
🔌 Fast Integration for Stripe Users
-If you are already using Stripe, integrating with Hyperswitch is fun, fast &
-easy.
+If you are already using Stripe, integrating with Hyperswitch is fun, fast & easy.
Try the steps below to get a feel for how quick the setup is:
1. Get API keys from our [dashboard].
@@ -99,11 +109,9 @@ Try the steps below to get a feel for how quick the setup is:
As of Sept 2023, we support 50+ payment processors and multiple global payment methods.
In addition, we are continuously integrating new processors based on their reach and community requests.
Our target is to support 100+ processors by H2 2023.
-You can find the latest list of payment processors, supported methods, and
-features
-[here][supported-connectors-and-features].
+You can find the latest list of payment processors, supported methods, and features [here][supported-connectors-and-features].
-[supported-connectors-and-features]: https://docs.google.com/spreadsheets/d/e/2PACX-1vQWHLza9m5iO4Ol-tEBx22_Nnq8Mb3ISCWI53nrinIGLK8eHYmHGnvXFXUXEut8AFyGyI9DipsYaBLG/pubhtml?gid=0&single=true
+[supported-connectors-and-features]: https://hyperswitch.io/pm-list
### 🌟 Hosted Version
@@ -255,11 +263,11 @@ We welcome contributions from the community. Please read through our
Included are directions for opening issues, coding standards, and notes on
development.
-🦀 **Important note for Rust developers**: We aim for contributions from the community
-across a broad range of tracks. Hence, we have prioritised simplicity and code
-readability over purely idiomatic code. For example, some of the code in core
-functions (e.g., `payments_core`) is written to be more readable than
-pure-idiomatic.
+- We appreciate all types of contributions: code, documentation, demo creation, or some new way you want to contribute to us.
+ We will reward every contribution with a Hyperswitch branded t-shirt.
+- 🦀 **Important note for Rust developers**: We aim for contributions from the community across a broad range of tracks.
+ Hence, we have prioritised simplicity and code readability over purely idiomatic code.
+ For example, some of the code in core functions (e.g., `payments_core`) is written to be more readable than pure-idiomatic.
@@ -267,12 +275,10 @@ pure-idiomatic.
Get updates on Hyperswitch development and chat with the community:
-- Read and subscribe to [the official Hyperswitch blog][blog].
-- Join our [Discord server][discord].
-- Join our [Slack workspace][slack].
-- Ask and explore our [GitHub Discussions][github-discussions].
+- [Discord server][discord] for questions related to contributing to hyperswitch, questions about the architecture, components, etc.
+- [Slack workspace][slack] for questions related to integrating hyperswitch, integrating a connector in hyperswitch, etc.
+- [GitHub Discussions][github-discussions] to drop feature requests or suggest anything payments-related you need for your stack.
-[blog]: https://hyperswitch.io/blog
[discord]: https://discord.gg/wJZ7DVW8mm
[slack]: https://join.slack.com/t/hyperswitch-io/shared_invite/zt-1k6cz4lee-SAJzhz6bjmpp4jZCDOtOIg
[github-discussions]: https://github.com/juspay/hyperswitch/discussions
@@ -317,7 +323,6 @@ Check the [CHANGELOG.md](./CHANGELOG.md) file for details.
This product is licensed under the [Apache 2.0 License](LICENSE).
-
✨ Thanks to all contributors
diff --git a/config/config.example.toml b/config/config.example.toml
index f0083bb48b19..d935a4e7f20d 100644
--- a/config/config.example.toml
+++ b/config/config.example.toml
@@ -21,25 +21,25 @@ idle_pool_connection_timeout = 90 # Timeout for idle pool connections (defaults
# Main SQL data store credentials
[master_database]
-username = "db_user" # DB Username
-password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
-host = "localhost" # DB Host
-port = 5432 # DB Port
-dbname = "hyperswitch_db" # Name of Database
-pool_size = 5 # Number of connections to keep open
-connection_timeout = 10 # Timeout for database connection in seconds
-queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
+username = "db_user" # DB Username
+password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
+host = "localhost" # DB Host
+port = 5432 # DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
# Replica SQL data store credentials
[replica_database]
-username = "replica_user" # DB Username
-password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
-host = "localhost" # DB Host
-port = 5432 # DB Port
-dbname = "hyperswitch_db" # Name of Database
-pool_size = 5 # Number of connections to keep open
-connection_timeout = 10 # Timeout for database connection in seconds
-queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
+username = "replica_user" # DB Username
+password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
+host = "localhost" # DB Host
+port = 5432 # DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
# Redis credentials
[redis]
@@ -53,6 +53,16 @@ default_hash_ttl = 900 # Default TTL for hashes entries, in seconds
use_legacy_version = false # Resp protocol for fred crate (set this to true if using RESPv2 or redis version < 6)
stream_read_count = 1 # Default number of entries to read from stream if not provided in stream read options
+# This section provides configs for currency conversion api
+[forex_api]
+call_delay = 21600 # Api calls are made every 6 hrs
+local_fetch_retry_count = 5 # Number of retries (5) when fetching from the local cache
+local_fetch_retry_delay = 1000 # Retry delay for checking write condition
+api_timeout = 20000 # Api times out once it crosses 20000 ms
+api_key = "YOUR API KEY HERE" # Api key for making request to foreign exchange Api
+fallback_api_key = "YOUR API KEY" # Api key for the fallback service
+redis_lock_timeout = 26000 # Redis remains write locked for 26000 ms once the acquire_redis_lock is called
+
# Logging configuration. Logging can be either to file or console or both.
# Logging configuration for file logging
@@ -95,23 +105,24 @@ sampling_rate = 0.1 # decimal rate between 0.0
otel_exporter_otlp_endpoint = "http://localhost:4317" # endpoint to send metrics and traces to, can include port number
otel_exporter_otlp_timeout = 5000 # timeout (in milliseconds) for sending metrics and traces
use_xray_generator = false # Set this to true for AWS X-ray compatible traces
-route_to_trace = [ "*/confirm" ]
+route_to_trace = ["*/confirm"]
# This section provides some secret values.
[secrets]
-master_enc_key = "sample_key" # Master Encryption key used to encrypt merchant wise encryption key. Should be 32-byte long.
-admin_api_key = "test_admin" # admin API key for admin authentication. Only applicable when KMS is disabled.
-kms_encrypted_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the admin_api_key. Only applicable when KMS is enabled.
-jwt_secret = "secret" # JWT secret used for user authentication. Only applicable when KMS is disabled.
-kms_encrypted_jwt_secret = "" # Base64-encoded (KMS encrypted) ciphertext of the jwt_secret. Only applicable when KMS is enabled.
-recon_admin_api_key = "recon_test_admin" # recon_admin API key for recon authentication. Only applicable when KMS is disabled.
-kms_encrypted_recon_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the recon_admin_api_key. Only applicable when KMS is enabled
+master_enc_key = "sample_key" # Master Encryption key used to encrypt merchant wise encryption key. Should be 32-byte long.
+admin_api_key = "test_admin" # admin API key for admin authentication. Only applicable when KMS is disabled.
+kms_encrypted_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the admin_api_key. Only applicable when KMS is enabled.
+jwt_secret = "secret" # JWT secret used for user authentication. Only applicable when KMS is disabled.
+kms_encrypted_jwt_secret = "" # Base64-encoded (KMS encrypted) ciphertext of the jwt_secret. Only applicable when KMS is enabled.
+recon_admin_api_key = "recon_test_admin" # recon_admin API key for recon authentication. Only applicable when KMS is disabled.
+kms_encrypted_recon_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the recon_admin_api_key. Only applicable when KMS is enabled
# Locker settings contain details for accessing a card locker, a
# PCI Compliant storage entity which stores payment method information
# like card details
[locker]
host = "" # Locker host
+host_rs = "" # Rust Locker host
mock_locker = true # Emulate a locker locally using Postgres
basilisk_host = "" # Basilisk host
locker_signing_key_id = "1" # Key_id to sign basilisk hs locker
@@ -123,14 +134,15 @@ connectors_with_delayed_session_response = "trustpay,payme" # List of connectors
connectors_with_webhook_source_verification_call = "paypal" # List of connectors which has additional source verification api-call
[jwekey] # 4 priv/pub key pair
-locker_key_identifier1 = "" # key identifier for key rotation , should be same as basilisk
-locker_key_identifier2 = "" # key identifier for key rotation , should be same as basilisk
-locker_encryption_key1 = "" # public key 1 in pem format, corresponding private key in basilisk
-locker_encryption_key2 = "" # public key 2 in pem format, corresponding private key in basilisk
-locker_decryption_key1 = "" # private key 1 in pem format, corresponding public key in basilisk
-locker_decryption_key2 = "" # private key 2 in pem format, corresponding public key in basilisk
-vault_encryption_key = "" # public key in pem format, corresponding private key in basilisk-hs
-vault_private_key = "" # private key in pem format, corresponding public key in basilisk-hs
+locker_key_identifier1 = "" # key identifier for key rotation , should be same as basilisk
+locker_key_identifier2 = "" # key identifier for key rotation , should be same as basilisk
+locker_encryption_key1 = "" # public key 1 in pem format, corresponding private key in basilisk
+locker_encryption_key2 = "" # public key 2 in pem format, corresponding private key in basilisk
+locker_decryption_key1 = "" # private key 1 in pem format, corresponding public key in basilisk
+locker_decryption_key2 = "" # private key 2 in pem format, corresponding public key in basilisk
+vault_encryption_key = "" # public key in pem format, corresponding private key in basilisk-hs
+rust_locker_encryption_key = "" # public key in pem format, corresponding private key in rust locker
+vault_private_key = "" # private key in pem format, corresponding public key in basilisk-hs
# Refund configuration
@@ -232,11 +244,11 @@ adyen = { banks = "e_platby_vub,postova_banka,sporo_pay,tatra_pay,viamo" }
# Bank redirect configs for allowed banks through online_banking_poland payment method
[bank_config.online_banking_poland]
-adyen = { banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24"}
+adyen = { banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24" }
# Bank redirect configs for allowed banks through open_banking_uk payment method
[bank_config.open_banking_uk]
-adyen = { banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled"}
+adyen = { banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled" }
# Bank redirect configs for allowed banks through przelewy24 payment method
[bank_config.przelewy24]
@@ -310,90 +322,101 @@ region = "" # The AWS region used by the KMS SDK for decrypting data.
# EmailClient configuration. Only applicable when the `email` feature flag is enabled.
[email]
-from_email = "notify@example.com" # Sender email
-aws_region = "" # AWS region used by AWS SES
-base_url = "" # Base url used when adding links that should redirect to self
+sender_email = "example@example.com" # Sender email
+aws_region = "" # AWS region used by AWS SES
+base_url = "" # Base url used when adding links that should redirect to self
+allowed_unverified_days = 1 # Number of days the api calls ( with jwt token ) can be made without verifying the email
+active_email_client = "SES" # The currently active email client
+
+# Configuration for aws ses, applicable when the active email client is SES
+[email.aws_ses]
+email_role_arn = "" # The amazon resource name ( arn ) of the role which has permission to send emails
+sts_role_session_name = "" # An identifier for the assumed role session, used to uniquely identify a session.
+
#tokenization configuration which describe token lifetime and payment method for specific connector
[tokenization]
stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { type = "disable_only", list = "google_pay" } }
checkout = { long_lived_token = false, payment_method = "wallet" }
-mollie = {long_lived_token = false, payment_method = "card"}
+mollie = { long_lived_token = false, payment_method = "card" }
stax = { long_lived_token = true, payment_method = "card,bank_debit" }
-square = {long_lived_token = false, payment_method = "card"}
+square = { long_lived_token = false, payment_method = "card" }
braintree = { long_lived_token = false, payment_method = "card" }
-gocardless = {long_lived_token = true, payment_method = "bank_debit"}
+gocardless = { long_lived_token = true, payment_method = "bank_debit" }
[temp_locker_enable_config]
-stripe = {payment_method = "bank_transfer"}
-nuvei = {payment_method = "card"}
-shift4 = {payment_method = "card"}
-bluesnap = {payment_method = "card"}
+stripe = { payment_method = "bank_transfer" }
+nuvei = { payment_method = "card" }
+shift4 = { payment_method = "card" }
+bluesnap = { payment_method = "card" }
[dummy_connector]
-enabled = true # Whether dummy connector is enabled or not
-payment_ttl = 172800 # Time to live for dummy connector payment in redis
-payment_duration = 1000 # Fake delay duration for dummy connector payment
-payment_tolerance = 100 # Fake delay tolerance for dummy connector payment
-payment_retrieve_duration = 500 # Fake delay duration for dummy connector payment sync
-payment_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector payment sync
-payment_complete_duration = 500 # Fake delay duration for dummy connector payment complete
-payment_complete_tolerance = 100 # Fake delay tolerance for dummy connector payment complete
-refund_ttl = 172800 # Time to live for dummy connector refund in redis
-refund_duration = 1000 # Fake delay duration for dummy connector refund
-refund_tolerance = 100 # Fake delay tolerance for dummy connector refund
-refund_retrieve_duration = 500 # Fake delay duration for dummy connector refund sync
-refund_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector refund sync
-authorize_ttl = 36000 # Time to live for dummy connector authorize request in redis
+enabled = true # Whether dummy connector is enabled or not
+payment_ttl = 172800 # Time to live for dummy connector payment in redis
+payment_duration = 1000 # Fake delay duration for dummy connector payment
+payment_tolerance = 100 # Fake delay tolerance for dummy connector payment
+payment_retrieve_duration = 500 # Fake delay duration for dummy connector payment sync
+payment_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector payment sync
+payment_complete_duration = 500 # Fake delay duration for dummy connector payment complete
+payment_complete_tolerance = 100 # Fake delay tolerance for dummy connector payment complete
+refund_ttl = 172800 # Time to live for dummy connector refund in redis
+refund_duration = 1000 # Fake delay duration for dummy connector refund
+refund_tolerance = 100 # Fake delay tolerance for dummy connector refund
+refund_retrieve_duration = 500 # Fake delay duration for dummy connector refund sync
+refund_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector refund sync
+authorize_ttl = 36000 # Time to live for dummy connector authorize request in redis
assets_base_url = "https://www.example.com/" # Base url for dummy connector assets
default_return_url = "https://www.example.com/" # Default return url when no return url is passed while payment
slack_invite_url = "https://www.example.com/" # Slack invite url for hyperswitch
discord_invite_url = "https://www.example.com/" # Discord invite url for hyperswitch
[mandates.supported_payment_methods]
-card.credit = {connector_list = "stripe,adyen"} # Mandate supported payment method type and connector for card
-wallet.paypal = {connector_list = "adyen"} # Mandate supported payment method type and connector for wallets
-pay_later.klarna = {connector_list = "adyen"} # Mandate supported payment method type and connector for pay_later
-bank_debit.ach = { connector_list = "gocardless"} # Mandate supported payment method type and connector for bank_debit
-bank_debit.becs = { connector_list = "gocardless"} # Mandate supported payment method type and connector for bank_debit
-bank_debit.sepa = { connector_list = "gocardless"} # Mandate supported payment method type and connector for bank_debit
+card.credit = { connector_list = "stripe,adyen" } # Mandate supported payment method type and connector for card
+wallet.paypal = { connector_list = "adyen" } # Mandate supported payment method type and connector for wallets
+pay_later.klarna = { connector_list = "adyen" } # Mandate supported payment method type and connector for pay_later
+bank_debit.ach = { connector_list = "gocardless" } # Mandate supported payment method type and connector for bank_debit
+bank_debit.becs = { connector_list = "gocardless" } # Mandate supported payment method type and connector for bank_debit
+bank_debit.sepa = { connector_list = "gocardless" } # Mandate supported payment method type and connector for bank_debit
# Required fields info used while listing the payment_method_data
[required_fields.pay_later] # payment_method = "pay_later"
-afterpay_clearpay = {fields = {stripe = [ # payment_method_type = afterpay_clearpay, connector = "stripe"
- # Required fields vector with its respective display name in front-end and field_type
- { required_field = "shipping.address.first_name", display_name = "first_name", field_type = "text" },
- { required_field = "shipping.address.last_name", display_name = "last_name", field_type = "text" },
- { required_field = "shipping.address.country", display_name = "country", field_type = { drop_down = { options = [ "US", "IN" ] } } },
- ] } }
+afterpay_clearpay = { fields = { stripe = [ # payment_method_type = afterpay_clearpay, connector = "stripe"
+ # Required fields vector with its respective display name in front-end and field_type
+ { required_field = "shipping.address.first_name", display_name = "first_name", field_type = "text" },
+ { required_field = "shipping.address.last_name", display_name = "last_name", field_type = "text" },
+ { required_field = "shipping.address.country", display_name = "country", field_type = { drop_down = { options = [
+ "US",
+ "IN",
+ ] } } },
+] } }
[payouts]
-payout_eligibility = true # Defaults the eligibility of a payout method to true in case connector does not provide checks for payout eligibility
+payout_eligibility = true # Defaults the eligibility of a payout method to true in case connector does not provide checks for payout eligibility
[pm_filters.adyen]
-online_banking_fpx = {country = "MY", currency = "MYR"}
-online_banking_thailand = {country = "TH", currency = "THB"}
-touch_n_go = {country = "MY", currency = "MYR"}
-atome = {country = "MY,SG", currency = "MYR,SGD"}
-swish = {country = "SE", currency = "SEK"}
-permata_bank_transfer = {country = "ID", currency = "IDR"}
-bca_bank_transfer = {country = "ID", currency = "IDR"}
-bni_va = {country = "ID", currency = "IDR"}
-bri_va = {country = "ID", currency = "IDR"}
-cimb_va = {country = "ID", currency = "IDR"}
-danamon_va = {country = "ID", currency = "IDR"}
-mandiri_va = {country = "ID", currency = "IDR"}
-alfamart = {country = "ID", currency = "IDR"}
-indomaret = {country = "ID", currency = "IDR"}
-open_banking_uk = {country = "GB", currency = "GBP"}
-oxxo = {country = "MX", currency = "MXN"}
-pay_safe_card = {country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,UAE,UK,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,ISK,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU"}
-seven_eleven = {country = "JP", currency = "JPY"}
-lawson = {country = "JP", currency = "JPY"}
-mini_stop = {country = "JP", currency = "JPY"}
-family_mart = {country = "JP", currency = "JPY"}
-seicomart = {country = "JP", currency = "JPY"}
-pay_easy = {country = "JP", currency = "JPY"}
+online_banking_fpx = { country = "MY", currency = "MYR" }
+online_banking_thailand = { country = "TH", currency = "THB" }
+touch_n_go = { country = "MY", currency = "MYR" }
+atome = { country = "MY,SG", currency = "MYR,SGD" }
+swish = { country = "SE", currency = "SEK" }
+permata_bank_transfer = { country = "ID", currency = "IDR" }
+bca_bank_transfer = { country = "ID", currency = "IDR" }
+bni_va = { country = "ID", currency = "IDR" }
+bri_va = { country = "ID", currency = "IDR" }
+cimb_va = { country = "ID", currency = "IDR" }
+danamon_va = { country = "ID", currency = "IDR" }
+mandiri_va = { country = "ID", currency = "IDR" }
+alfamart = { country = "ID", currency = "IDR" }
+indomaret = { country = "ID", currency = "IDR" }
+open_banking_uk = { country = "GB", currency = "GBP" }
+oxxo = { country = "MX", currency = "MXN" }
+pay_safe_card = { country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,UAE,UK,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,ISK,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU" }
+seven_eleven = { country = "JP", currency = "JPY" }
+lawson = { country = "JP", currency = "JPY" }
+mini_stop = { country = "JP", currency = "JPY" }
+family_mart = { country = "JP", currency = "JPY" }
+seicomart = { country = "JP", currency = "JPY" }
+pay_easy = { country = "JP", currency = "JPY" }
[pm_filters.zen]
credit = { not_available_flows = { capture_method = "manual" } }
@@ -412,8 +435,8 @@ credit = { currency = "USD" }
debit = { currency = "USD" }
ach = { currency = "USD" }
-[pm_filters.stripe]
-cashapp = {country = "US", currency = "USD"}
+[pm_filters.prophetpay]
+card_redirect = { currency = "USD" }
[connector_customer]
connector_list = "gocardless,stax,stripe"
@@ -429,10 +452,10 @@ adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_ba
supported_connectors = "braintree"
[applepay_decrypt_keys]
-apple_pay_ppc = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE" #Payment Processing Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Payment Processing Certificate
-apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" #Private key generate by Elliptic-curve prime256v1 curve
-apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE" #Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
-apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY" #Private key generate by RSA:2048 algorithm
+apple_pay_ppc = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE" #Payment Processing Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Payment Processing Certificate
+apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" #Private key generated by Elliptic-curve prime256v1 curve
+apple_pay_merchant_cert = "APPLE_PAY_MERCHANT_CERTIFICATE" #Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
+apple_pay_merchant_cert_key = "APPLE_PAY_MERCHANT_CERTIFICATE_KEY" #Private key generated by RSA:2048 algorithm
[payment_link]
sdk_url = "http://localhost:9090/dist/HyperLoader.js"
diff --git a/config/development.toml b/config/development.toml
index 63c1f045d94f..fa5fddb0d60a 100644
--- a/config/development.toml
+++ b/config/development.toml
@@ -20,6 +20,7 @@ port = 5432
dbname = "hyperswitch_db"
pool_size = 5
connection_timeout = 10
+min_idle = 2
[replica_database]
username = "db_user"
@@ -48,9 +49,19 @@ applepay_endpoint = "DOMAIN SPECIFIC ENDPOINT"
[locker]
host = ""
+host_rs = ""
mock_locker = true
basilisk_host = ""
+[forex_api]
+call_delay = 21600
+local_fetch_retry_count = 5
+local_fetch_retry_delay = 1000
+api_timeout = 20000
+api_key = "YOUR API KEY HERE"
+fallback_api_key = "YOUR API KEY HERE"
+redis_lock_timeout = 26000
+
[jwekey]
locker_key_identifier1 = ""
locker_key_identifier2 = ""
@@ -59,6 +70,7 @@ locker_encryption_key2 = ""
locker_decryption_key1 = ""
locker_decryption_key2 = ""
vault_encryption_key = ""
+rust_locker_encryption_key = ""
vault_private_key = ""
tunnel_private_key = ""
@@ -200,9 +212,15 @@ disabled = false
consumer_group = "SCHEDULER_GROUP"
[email]
-from_email = "notify@example.com"
+sender_email = "example@example.com"
aws_region = ""
-base_url = ""
+base_url = "http://localhost:8080"
+allowed_unverified_days = 1
+active_email_client = "SES"
+
+[email.aws_ses]
+email_role_arn = ""
+sts_role_session_name = ""
[bank_config.eps]
stripe = { banks = "arzte_und_apotheker_bank,austrian_anadi_bank_ag,bank_austria,bankhaus_carl_spangler,bankhaus_schelhammer_und_schattera_ag,bawag_psk_ag,bks_bank_ag,brull_kallmus_bank_ag,btv_vier_lander_bank,capital_bank_grawe_gruppe_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_alpeadriabank_international_ag,hypo_noe_lb_fur_niederosterreich_u_wien,hypo_oberosterreich_salzburg_steiermark,hypo_tirol_bank_ag,hypo_vorarlberg_bank_ag,hypo_bank_burgenland_aktiengesellschaft,marchfelder_bank,oberbank_ag,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag,vr_bank_braunau" }
@@ -353,6 +371,9 @@ credit = { currency = "USD" }
debit = { currency = "USD" }
ach = { currency = "USD" }
+[pm_filters.prophetpay]
+card_redirect = { currency = "USD" }
+
[pm_filters.trustpay]
credit = { not_available_flows = { capture_method = "manual" } }
debit = { not_available_flows = { capture_method = "manual" } }
@@ -454,3 +475,33 @@ delay_between_retries_in_milliseconds = 500
[kv_config]
ttl = 900 # 15 * 60 seconds
+
+[events]
+source = "logs"
+
+[events.kafka]
+brokers = ["localhost:9092"]
+intent_analytics_topic = "hyperswitch-payment-intent-events"
+attempt_analytics_topic = "hyperswitch-payment-attempt-events"
+refund_analytics_topic = "hyperswitch-refund-events"
+api_logs_topic = "hyperswitch-api-log-events"
+connector_events_topic = "hyperswitch-connector-api-events"
+
+[analytics]
+source = "sqlx"
+
+[analytics.clickhouse]
+username = "default"
+# password = ""
+host = "http://localhost:8123"
+database_name = "default"
+
+[analytics.sqlx]
+username = "db_user"
+password = "db_pass"
+host = "localhost"
+port = 5432
+dbname = "hyperswitch_db"
+pool_size = 5
+connection_timeout = 10
+queue_strategy = "Fifo"
\ No newline at end of file
diff --git a/config/docker_compose.toml b/config/docker_compose.toml
index ddda7e7021a4..4d50600e1bf8 100644
--- a/config/docker_compose.toml
+++ b/config/docker_compose.toml
@@ -15,7 +15,7 @@ level = "DEBUG" # What you see in your terminal.
[log.telemetry]
traces_enabled = false # Whether traces are enabled.
-metrics_enabled = false # Whether metrics are enabled.
+metrics_enabled = true # Whether metrics are enabled.
ignore_errors = false # Whether to ignore errors during traces or metrics pipeline setup.
otel_exporter_otlp_endpoint = "https://otel-collector:4317" # Endpoint to send metrics and traces to.
use_xray_generator = false
@@ -28,6 +28,15 @@ port = 5432
dbname = "hyperswitch_db"
pool_size = 5
+[forex_api]
+call_delay = 21600
+local_fetch_retry_count = 5
+local_fetch_retry_delay = 1000
+api_timeout = 20000
+api_key = "YOUR API KEY HERE"
+fallback_api_key = "YOUR API KEY HERE"
+redis_lock_timeout = 26000
+
[replica_database]
username = "db_user"
password = "db_pass"
@@ -44,6 +53,7 @@ recon_admin_api_key = "recon_test_admin"
[locker]
host = ""
+host_rs = ""
mock_locker = true
basilisk_host = ""
@@ -55,6 +65,7 @@ locker_encryption_key2 = ""
locker_decryption_key1 = ""
locker_decryption_key2 = ""
vault_encryption_key = ""
+rust_locker_encryption_key = ""
vault_private_key = ""
[redis]
@@ -283,6 +294,9 @@ red_pagos = { country = "UY", currency = "UYU" }
[pm_filters.stripe]
cashapp = {country = "US", currency = "USD"}
+[pm_filters.prophetpay]
+card_redirect = { currency = "USD" }
+
[pm_filters.stax]
credit = { currency = "USD" }
debit = { currency = "USD" }
@@ -319,16 +333,32 @@ supported_connectors = "braintree"
redis_lock_expiry_seconds = 180 # 3 * 60 seconds
delay_between_retries_in_milliseconds = 500
+[events.kafka]
+brokers = ["localhost:9092"]
+intent_analytics_topic = "hyperswitch-payment-intent-events"
+attempt_analytics_topic = "hyperswitch-payment-attempt-events"
+refund_analytics_topic = "hyperswitch-refund-events"
+api_logs_topic = "hyperswitch-api-log-events"
+connector_events_topic = "hyperswitch-connector-api-events"
+
[analytics]
source = "sqlx"
+[analytics.clickhouse]
+username = "default"
+# password = ""
+host = "http://localhost:8123"
+database_name = "default"
+
[analytics.sqlx]
username = "db_user"
password = "db_pass"
-host = "pg"
+host = "localhost"
port = 5432
dbname = "hyperswitch_db"
pool_size = 5
+connection_timeout = 10
+queue_strategy = "Fifo"
[kv_config]
ttl = 900 # 15 * 60 seconds
diff --git a/connector-template/mod.rs b/connector-template/mod.rs
index 7f21962109de..e9945a726a95 100644
--- a/connector-template/mod.rs
+++ b/connector-template/mod.rs
@@ -106,6 +106,7 @@ impl ConnectorCommon for {{project-name | downcase | pascal_case}} {
message: response.message,
reason: response.reason,
attempt_status: None,
+ connector_transaction_id: None,
})
}
}
@@ -485,7 +486,7 @@ impl api::IncomingWebhook for {{project-name | downcase | pascal_case}} {
fn get_webhook_resource_object(
&self,
_request: &api::IncomingWebhookRequestDetails<'_>,
- ) -> CustomResult {
+ ) -> CustomResult, errors::ConnectorError> {
Err(errors::ConnectorError::WebhooksNotImplemented).into_report()
}
}
diff --git a/connector-template/test.rs b/connector-template/test.rs
index 5bbf761dea19..7b093ddb6efa 100644
--- a/connector-template/test.rs
+++ b/connector-template/test.rs
@@ -17,6 +17,7 @@ impl utils::Connector for {{project-name | downcase | pascal_case}}Test {
connector: Box::new(&{{project-name | downcase | pascal_case}}),
connector_name: types::Connector::{{project-name | downcase | pascal_case}},
get_token: types::api::GetToken::Connector,
+ merchant_connector_id: None,
}
}
diff --git a/connector-template/transformers.rs b/connector-template/transformers.rs
index 3ed53a906a2e..bdbfb2e45672 100644
--- a/connector-template/transformers.rs
+++ b/connector-template/transformers.rs
@@ -130,6 +130,7 @@ impl TryFrom kafka-ui is a visual tool for inspecting kafka on localhost:8090
+
+#### Setting up Clickhouse
+
+Once clickhouse is up & running you need to create the required tables for it
+
+You can either visit the url (http://localhost:8123/play) on which the clickhouse-server is running to get a playground.
+Alternatively, you can bash into the clickhouse container & execute commands manually.
+```
+# On your local terminal
+docker compose exec clickhouse-server bash
+
+# Inside the clickhouse-server container shell
+clickhouse-client --user default
+
+# Inside the clickhouse-client shell
+SHOW TABLES;
+CREATE TABLE ......
+```
+
+The table creation scripts are provided [here](./scripts)
+
+#### Running/Debugging your application
+Once set up, you can run your application either via docker compose or normally via cargo run
+
+Remember to enable the kafka_events via development.toml/docker_compose.toml files
+
+Inspect the [kafka-ui](http://localhost:8090) to check the messages being inserted in queue
+
+If the messages/topic are available then you can run select queries on your clickhouse table to ensure data is being populated...
+
+If the data is not being populated in clickhouse, you can check the error logs in clickhouse server via
+```
+# Inside the clickhouse-server container shell
+tail -f /var/log/clickhouse-server/clickhouse-server.err.log
+```
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/README.md b/crates/analytics/docs/clickhouse/cluster_setup/README.md
new file mode 100644
index 000000000000..cd5f2dfeb023
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/README.md
@@ -0,0 +1,347 @@
+# Tutorial for set up clickhouse server
+
+
+## Single server with docker
+
+
+- Run server
+
+```
+docker run -d --name clickhouse-server -p 9000:9000 --ulimit nofile=262144:262144 yandex/clickhouse-server
+
+```
+
+- Run client
+
+```
+docker run -it --rm --link clickhouse-server:clickhouse-server yandex/clickhouse-client --host clickhouse-server
+```
+
+Now you can check whether the setup succeeded.
+
+
+## Setup Cluster
+
+
+This part we will setup
+
+- 1 cluster, with 3 shards
+- Each shard has 2 replica server
+- Use ReplicatedMergeTree & Distributed table to setup our table.
+
+
+### Cluster
+
+Let's see our docker-compose.yml first.
+
+```
+version: '3'
+
+services:
+ clickhouse-zookeeper:
+ image: zookeeper
+ ports:
+ - "2181:2181"
+ - "2182:2182"
+ container_name: clickhouse-zookeeper
+ hostname: clickhouse-zookeeper
+
+ clickhouse-01:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-01
+ container_name: clickhouse-01
+ ports:
+ - 9001:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-01.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-01:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-02:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-02
+ container_name: clickhouse-02
+ ports:
+ - 9002:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-02.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-02:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-03:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-03
+ container_name: clickhouse-03
+ ports:
+ - 9003:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-03.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-03:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-04:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-04
+ container_name: clickhouse-04
+ ports:
+ - 9004:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-04.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-04:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-05:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-05
+ container_name: clickhouse-05
+ ports:
+ - 9005:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-05.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-05:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-06:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-06
+ container_name: clickhouse-06
+ ports:
+ - 9006:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-06.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-06:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+networks:
+ default:
+ external:
+ name: clickhouse-net
+```
+
+
+We have 6 clickhouse server container and one zookeeper container.
+
+
+**To enable replication ZooKeeper is required. ClickHouse will take care of data consistency on all replicas and run restore procedure after failure automatically. It's recommended to deploy ZooKeeper cluster to separate servers.**
+
+**ZooKeeper is not a requirement — in some simple cases you can duplicate the data by writing it into all the replicas from your application code. This approach is not recommended — in this case ClickHouse is not able to guarantee data consistency on all replicas. This remains the responsibility of your application.**
+
+
+Let's see config file.
+
+`./config/clickhouse_config.xml` is the default config file in docker, we copy it out and add this line
+
+```
+
+ /etc/clickhouse-server/metrika.xml
+```
+
+
+So lets see `clickhouse_metrika.xml`
+
+```
+
+
+
+
+ 1
+ true
+
+ clickhouse-01
+ 9000
+
+
+ clickhouse-06
+ 9000
+
+
+
+ 1
+ true
+
+ clickhouse-02
+ 9000
+
+
+ clickhouse-03
+ 9000
+
+
+
+ 1
+ true
+
+
+ clickhouse-04
+ 9000
+
+
+ clickhouse-05
+ 9000
+
+
+
+
+
+
+ clickhouse-zookeeper
+ 2181
+
+
+
+ ::/0
+
+
+
+ 10000000000
+ 0.01
+ lz4
+
+
+
+```
+
+and macros.xml; each instance has its own macros settings, for example server 1:
+
+```
+
+
+ clickhouse-01
+ 01
+ 01
+
+
+```
+
+
+**Make sure your macros settings match the remote server settings in metrika.xml**
+
+So now you can start the server.
+
+```
+docker network create clickhouse-net
+docker-compose up -d
+```
+
+Connect to the server and check whether the cluster settings are correct:
+
+```
+docker run -it --rm --network="clickhouse-net" --link clickhouse-01:clickhouse-server yandex/clickhouse-client --host clickhouse-server
+```
+
+```sql
+clickhouse-01 :) select * from system.clusters;
+
+SELECT *
+FROM system.clusters
+
+┌─cluster─────────────────────┬─shard_num─┬─shard_weight─┬─replica_num─┬─host_name─────┬─host_address─┬─port─┬─is_local─┬─user────┬─default_database─┐
+│ cluster_1 │ 1 │ 1 │ 1 │ clickhouse-01 │ 172.21.0.4 │ 9000 │ 1 │ default │ │
+│ cluster_1 │ 1 │ 1 │ 2 │ clickhouse-06 │ 172.21.0.5 │ 9000 │ 1 │ default │ │
+│ cluster_1 │ 2 │ 1 │ 1 │ clickhouse-02 │ 172.21.0.8 │ 9000 │ 0 │ default │ │
+│ cluster_1 │ 2 │ 1 │ 2 │ clickhouse-03 │ 172.21.0.6 │ 9000 │ 0 │ default │ │
+│ cluster_1 │ 3 │ 1 │ 1 │ clickhouse-04 │ 172.21.0.7 │ 9000 │ 0 │ default │ │
+│ cluster_1 │ 3 │ 1 │ 2 │ clickhouse-05 │ 172.21.0.3 │ 9000 │ 0 │ default │ │
+│ test_shard_localhost │ 1 │ 1 │ 1 │ localhost │ 127.0.0.1 │ 9000 │ 1 │ default │ │
+│ test_shard_localhost_secure │ 1 │ 1 │ 1 │ localhost │ 127.0.0.1 │ 9440 │ 0 │ default │ │
+└─────────────────────────────┴───────────┴──────────────┴─────────────┴───────────────┴──────────────┴──────┴──────────┴─────────┴──────────────────┘
+```
+
+If you see this, the cluster settings are configured correctly (though this does not yet confirm that the replicas can connect to each other).
+
+
+### Replica Table
+
+So now we have a cluster and replica settings. For clickhouse, we need to create ReplicatedMergeTree Table as a local table in every server.
+
+```sql
+CREATE TABLE ttt (id Int32) ENGINE = ReplicatedMergeTree('/clickhouse/tables/{layer}-{shard}/ttt', '{replica}') PARTITION BY id ORDER BY id
+```
+
+and Create Distributed Table conn to local table
+
+```sql
+CREATE TABLE ttt_all as ttt ENGINE = Distributed(cluster_1, default, ttt, rand());
+```
+
+
+### Insert and test
+
+Generate some data and test.
+
+
+```
+# docker exec into client server 1 and
+for ((idx=1;idx<=100;++idx)); do clickhouse-client --host clickhouse-server --query "Insert into default.ttt_all values ($idx)"; done;
+```
+
+For Distributed table.
+
+```
+select count(*) from ttt_all;
+```
+
+For the local table.
+
+```
+select count(*) from ttt;
+```
+
+
+## Authentication
+
+Please see config/users.xml
+
+
+- Connect
+```bash
+docker run -it --rm --network="clickhouse-net" --link clickhouse-01:clickhouse-server yandex/clickhouse-client --host clickhouse-server -u user1 --password 123456
+```
+
+## Source
+
+- https://clickhouse.yandex/docs/en/operations/table_engines/replication/#creating-replicated-tables
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml
new file mode 100644
index 000000000000..94c854dc273a
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml
@@ -0,0 +1,370 @@
+
+
+
+
+ error
+ 1000M
+ 1
+ 10
+
+
+
+ 8123
+ 9000
+
+
+
+
+
+
+
+
+ /etc/clickhouse-server/server.crt
+ /etc/clickhouse-server/server.key
+
+ /etc/clickhouse-server/dhparam.pem
+ none
+ true
+ true
+ sslv2,sslv3
+ true
+
+
+
+ true
+ true
+ sslv2,sslv3
+ true
+
+
+
+ RejectCertificateHandler
+
+
+
+
+
+
+
+
+ 9009
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4096
+ 3
+
+
+ 100
+
+
+
+
+
+ 8589934592
+
+
+ 5368709120
+
+
+
+ /var/lib/clickhouse/
+
+
+ /var/lib/clickhouse/tmp/
+
+
+ /var/lib/clickhouse/user_files/
+
+
+ users.xml
+
+
+ default
+
+
+
+
+
+ default
+
+
+
+
+
+
+
+
+
+
+
+
+
+ localhost
+ 9000
+
+
+
+
+
+
+ localhost
+ 9440
+ 1
+
+
+
+
+
+
+
+ /etc/clickhouse-server/metrika.xml
+
+
+
+
+
+
+
+
+ 3600
+
+
+
+ 3600
+
+
+ 60
+
+
+
+
+
+
+
+
+
+ system
+
+
+ toYYYYMM(event_date)
+
+ 7500
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ *_dictionary.xml
+
+
+
+
+
+
+
+
+
+ /clickhouse/task_queue/ddl
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ click_cost
+ any
+
+ 0
+ 3600
+
+
+ 86400
+ 60
+
+
+
+ max
+
+ 0
+ 60
+
+
+ 3600
+ 300
+
+
+ 86400
+ 3600
+
+
+
+
+
+ /var/lib/clickhouse/format_schemas/
+
+
+
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml
new file mode 100644
index 000000000000..b58ffc34bc29
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml
@@ -0,0 +1,60 @@
+
+
+
+
+ 1
+ true
+
+ clickhouse-01
+ 9000
+
+
+ clickhouse-06
+ 9000
+
+
+
+ 1
+ true
+
+ clickhouse-02
+ 9000
+
+
+ clickhouse-03
+ 9000
+
+
+
+ 1
+ true
+
+
+ clickhouse-04
+ 9000
+
+
+ clickhouse-05
+ 9000
+
+
+
+
+
+
+ clickhouse-zookeeper
+ 2181
+
+
+
+ ::/0
+
+
+
+ 10000000000
+ 0.01
+ lz4
+
+
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml
new file mode 100644
index 000000000000..75df1c5916e8
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-01
+ 01
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml
new file mode 100644
index 000000000000..67e4a545b30c
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-02
+ 02
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml
new file mode 100644
index 000000000000..e9278191b80f
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-03
+ 02
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml
new file mode 100644
index 000000000000..033c0ad1152e
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-04
+ 03
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml
new file mode 100644
index 000000000000..c63314c5acea
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-05
+ 03
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml
new file mode 100644
index 000000000000..4b01bda9948c
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-06
+ 01
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml
new file mode 100644
index 000000000000..e1b8de78e37a
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+ 10000000000
+
+
+ 0
+
+
+ random
+
+
+
+
+ 1
+
+
+
+
+
+
+ 123456
+
+ ::/0
+
+ default
+ default
+
+
+
+
+
+
+
+
+ ::/0
+
+
+
+ default
+
+
+ default
+
+
+
+
+
+
+ ::1
+ 127.0.0.1
+
+ readonly
+ default
+
+
+
+
+
+
+
+
+
+
+ 3600
+
+
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml b/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml
new file mode 100644
index 000000000000..96d7618b47e6
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml
@@ -0,0 +1,198 @@
+version: '3'
+
+networks:
+ ckh_net:
+
+services:
+ clickhouse-zookeeper:
+ image: zookeeper
+ ports:
+ - "2181:2181"
+ - "2182:2182"
+ container_name: clickhouse-zookeeper
+ hostname: clickhouse-zookeeper
+ networks:
+ - ckh_net
+
+ clickhouse-01:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-01
+ container_name: clickhouse-01
+ networks:
+ - ckh_net
+ ports:
+ - 9001:9000
+ - 8124:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-01.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-01:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-02:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-02
+ container_name: clickhouse-02
+ networks:
+ - ckh_net
+ ports:
+ - 9002:9000
+ - 8125:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-02.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-02:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-03:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-03
+ container_name: clickhouse-03
+ networks:
+ - ckh_net
+ ports:
+ - 9003:9000
+ - 8126:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-03.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-03:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-04:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-04
+ container_name: clickhouse-04
+ networks:
+ - ckh_net
+ ports:
+ - 9004:9000
+ - 8127:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-04.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-04:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-05:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-05
+ container_name: clickhouse-05
+ networks:
+ - ckh_net
+ ports:
+ - 9005:9000
+ - 8128:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-05.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-05:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-06:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-06
+ container_name: clickhouse-06
+ networks:
+ - ckh_net
+ ports:
+ - 9006:9000
+ - 8129:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-06.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-06:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.0.5
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - 9092:9092
+ - 9093
+ - 9997
+ - 29092
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ JMX_PORT: 9997
+ KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+ volumes:
+ - ./kafka-script.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+ networks:
+ ckh_net:
+ aliases:
+ - hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local
+
+
+ # Kafka UI for debugging kafka queues
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8090:8080
+ depends_on:
+ - kafka0
+ networks:
+ - ckh_net
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
+ KAFKA_CLUSTERS_0_JMXPORT: 9997
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh b/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh
new file mode 100755
index 000000000000..023c832b4e1b
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh
@@ -0,0 +1,11 @@
+# This script is required to run kafka cluster (without zookeeper)
+#!/bin/sh
+
+# Docker workaround: Remove check for KAFKA_ZOOKEEPER_CONNECT parameter
+sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure
+
+# Docker workaround: Ignore cub zk-ready
+sed -i 's/cub zk-ready/echo ignore zk-ready/' /etc/confluent/docker/ensure
+
+# KRaft required step: Format the storage directory with a new cluster ID
+echo "kafka-storage format --ignore-formatted -t $(kafka-storage random-uuid) -c /etc/kafka/kafka.properties" >> /etc/confluent/docker/ensure
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql
new file mode 100644
index 000000000000..0fe194a0e676
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql
@@ -0,0 +1,237 @@
+CREATE TABLE hyperswitch.api_events_queue on cluster '{cluster}' (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String)
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local:9092',
+kafka_topic_list = 'hyperswitch-api-log-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+CREATE TABLE hyperswitch.api_events_clustered on cluster '{cluster}' (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1,
+ INDEX apiIndex api_name TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/api_events_clustered',
+ '{replica}'
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, flow_type, status_code, api_name)
+TTL created_at + toIntervalMonth(6)
+;
+
+
+CREATE TABLE hyperswitch.api_events_dist on cluster '{cluster}' (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `inserted_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String)
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'api_events_clustered', rand());
+
+CREATE MATERIALIZED VIEW hyperswitch.api_events_mv on cluster '{cluster}' TO hyperswitch.api_events_dist (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `inserted_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String)
+) AS
+SELECT
+ merchant_id,
+ payment_id,
+ refund_id,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ customer_id,
+ user_id,
+ request_id,
+ flow_type,
+ api_name,
+ request,
+ response,
+ status_code,
+ url_path,
+ event_type,
+ now() as inserted_at,
+ created_at,
+ latency,
+ user_agent,
+ ip_addr
+FROM
+ hyperswitch.api_events_queue
+WHERE length(_error) = 0;
+
+
+CREATE MATERIALIZED VIEW hyperswitch.api_events_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.api_events_queue
+WHERE length(_error) > 0
+;
+
+
+ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `url_path` LowCardinality(Nullable(String));
+ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `event_type` LowCardinality(Nullable(String));
+
+
+CREATE TABLE hyperswitch.api_audit_log ON CLUSTER '{cluster}' (
+ `merchant_id` LowCardinality(String),
+ `payment_id` String,
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `customer_id` LowCardinality(Nullable(String))
+) ENGINE = ReplicatedMergeTree( '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/api_audit_log', '{replica}' ) PARTITION BY merchant_id
+ORDER BY (merchant_id, payment_id)
+TTL created_at + toIntervalMonth(18)
+SETTINGS index_granularity = 8192
+
+
+CREATE MATERIALIZED VIEW hyperswitch.api_audit_log_mv ON CLUSTER `{cluster}` TO hyperswitch.api_audit_log(
+ `merchant_id` LowCardinality(String),
+ `payment_id` String,
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `inserted_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String)
+) AS
+SELECT
+ merchant_id,
+ multiIf(payment_id IS NULL, '', payment_id) AS payment_id,
+ refund_id,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ customer_id,
+ user_id,
+ request_id,
+ flow_type,
+ api_name,
+ request,
+ response,
+ status_code,
+ url_path,
+ api_event_type AS event_type,
+ now() AS inserted_at,
+ created_at,
+ latency,
+ user_agent,
+ ip_addr
+FROM hyperswitch.api_events_queue
+WHERE length(_error) = 0
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql
new file mode 100644
index 000000000000..3a6281ae9050
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql
@@ -0,0 +1,217 @@
+CREATE TABLE hyperswitch.payment_attempt_queue on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` LowCardinality(Nullable(String)),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-attempt-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+CREATE TABLE hyperswitch.payment_attempt_dist on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'payment_attempt_clustered', cityHash64(attempt_id));
+
+
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_attempt_mv on cluster '{cluster}' TO hyperswitch.payment_attempt_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime64(3),
+ `capture_on` Nullable(DateTime64(3)),
+ `last_synced` Nullable(DateTime64(3)),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ attempt_id,
+ status,
+ amount,
+ currency,
+ connector,
+ save_to_locker,
+ error_message,
+ offer_amount,
+ surcharge_amount,
+ tax_amount,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ connector_transaction_id,
+ capture_method,
+ confirm,
+ authentication_type,
+ cancellation_reason,
+ amount_to_capture,
+ mandate_id,
+ browser_info,
+ error_code,
+ connector_metadata,
+ payment_experience,
+ created_at,
+ capture_on,
+ last_synced,
+ modified_at,
+ now() as inserted_at,
+ sign_flag
+FROM
+ hyperswitch.payment_attempt_queue
+WHERE length(_error) = 0;
+
+
+CREATE TABLE hyperswitch.payment_attempt_clustered on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
+ INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedCollapsingMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/payment_attempt_clustered',
+ '{replica}',
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, attempt_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_attempt_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.payment_attempt_queue
+WHERE length(_error) > 0
+;
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql
new file mode 100644
index 000000000000..eb2d83140e92
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql
@@ -0,0 +1,165 @@
+CREATE TABLE hyperswitch.payment_intents_queue on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` String,
+ `business_label` String,
+ `modified_at` DateTime,
+ `created_at` DateTime,
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-intent-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+CREATE TABLE hyperswitch.payment_intents_dist on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'payment_intents_clustered', cityHash64(payment_id));
+
+CREATE TABLE hyperswitch.payment_intents_clustered on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedCollapsingMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/payment_intents_clustered',
+ '{replica}',
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, payment_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_intent_mv on cluster '{cluster}' TO hyperswitch.payment_intents_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `last_synced` Nullable(DateTime64(3)),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ status,
+ amount,
+ currency,
+ amount_captured,
+ customer_id,
+ description,
+ return_url,
+ connector_id,
+ statement_descriptor_name,
+ statement_descriptor_suffix,
+ setup_future_usage,
+ off_session,
+ client_secret,
+ active_attempt_id,
+ business_country,
+ business_label,
+ modified_at,
+ created_at,
+ last_synced,
+ now() as inserted_at,
+ sign_flag
+FROM hyperswitch.payment_intents_queue
+WHERE length(_error) = 0;
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_intent_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.payment_intents_queue
+WHERE length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql
new file mode 100644
index 000000000000..bf5f6e0e2405
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql
@@ -0,0 +1,173 @@
+CREATE TABLE hyperswitch.refund_queue on cluster '{cluster}' (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime,
+ `modified_at` DateTime,
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-refund-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+CREATE TABLE hyperswitch.refund_dist on cluster '{cluster}' (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'refund_clustered', cityHash64(refund_id));
+
+
+
+CREATE TABLE hyperswitch.refund_clustered on cluster '{cluster}' (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedCollapsingMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/refund_clustered',
+ '{replica}',
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, refund_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.kafka_parse_refund on cluster '{cluster}' TO hyperswitch.refund_dist (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime64(3),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ internal_reference_id,
+ refund_id,
+ payment_id,
+ merchant_id,
+ connector_transaction_id,
+ connector,
+ connector_refund_id,
+ external_reference_id,
+ refund_type,
+ total_amount,
+ currency,
+ refund_amount,
+ refund_status,
+ sent_to_gateway,
+ refund_error_message,
+ refund_arn,
+ attempt_id,
+ description,
+ refund_reason,
+ refund_error_code,
+ created_at,
+ modified_at,
+ now() as inserted_at,
+ sign_flag
+FROM hyperswitch.refund_queue
+WHERE length(_error) = 0;
+
+CREATE MATERIALIZED VIEW hyperswitch.refund_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.refund_queue
+WHERE length(_error) > 0
+;
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql
new file mode 100644
index 000000000000..37766392bc70
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql
@@ -0,0 +1,156 @@
+CREATE TABLE hyperswitch.sdk_events_queue on cluster '{cluster}' (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` LowCardinality(Nullable(String)),
+ `latency` Nullable(UInt32),
+ `timestamp` String,
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String))
+) ENGINE = Kafka SETTINGS
+ kafka_broker_list = 'hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local:9092',
+ kafka_topic_list = 'hyper-sdk-logs',
+ kafka_group_name = 'hyper-c1',
+ kafka_format = 'JSONEachRow',
+ kafka_handle_error_mode = 'stream';
+
+CREATE TABLE hyperswitch.sdk_events_clustered on cluster '{cluster}' (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` Bool DEFAULT 1,
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String)) DEFAULT '',
+ `created_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `inserted_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `latency` Nullable(UInt32) DEFAULT 0,
+ INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
+ INDEX eventIndex event_name TYPE bloom_filter GRANULARITY 1,
+ INDEX platformIndex platform TYPE bloom_filter GRANULARITY 1,
+ INDEX logTypeIndex log_type TYPE bloom_filter GRANULARITY 1,
+ INDEX categoryIndex category TYPE bloom_filter GRANULARITY 1,
+ INDEX sourceIndex source TYPE bloom_filter GRANULARITY 1,
+ INDEX componentIndex component TYPE bloom_filter GRANULARITY 1,
+ INDEX firstEventIndex first_event TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/sdk_events_clustered', '{replica}'
+)
+PARTITION BY
+ toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id)
+TTL
+ toDateTime(created_at) + toIntervalMonth(6)
+SETTINGS
+ index_granularity = 8192
+;
+
+CREATE TABLE hyperswitch.sdk_events_dist on cluster '{cluster}' (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` Bool DEFAULT 1,
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String)) DEFAULT '',
+ `created_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `inserted_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `latency` Nullable(UInt32) DEFAULT 0
+) ENGINE = Distributed(
+ '{cluster}', 'hyperswitch', 'sdk_events_clustered', rand()
+);
+
+CREATE MATERIALIZED VIEW hyperswitch.sdk_events_mv on cluster '{cluster}' TO hyperswitch.sdk_events_dist (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` Bool,
+ `latency` Nullable(UInt32),
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String)),
+ `created_at` DateTime64(3)
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ remote_ip,
+ log_type,
+ event_name,
+ multiIf(first_event = 'true', 1, 0) AS first_event,
+ latency,
+ browser_name,
+ browser_version,
+ platform,
+ source,
+ category,
+ version,
+ value,
+ component,
+ payment_method,
+ payment_experience,
+ toDateTime64(timestamp, 3) AS created_at
+FROM
+ hyperswitch.sdk_events_queue
+WHERE length(_error) = 0
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.sdk_parse_errors on cluster '{cluster}' (
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+) ENGINE = MergeTree
+ ORDER BY (topic, partition, offset)
+SETTINGS
+ index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM
+ hyperswitch.sdk_events_queue
+WHERE
+ length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql
new file mode 100644
index 000000000000..202b94ac6040
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql
@@ -0,0 +1 @@
+create database hyperswitch on cluster '{cluster}';
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/scripts/api_events_v2.sql b/crates/analytics/docs/clickhouse/scripts/api_events_v2.sql
new file mode 100644
index 000000000000..b41a75fe67e5
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/api_events_v2.sql
@@ -0,0 +1,134 @@
+CREATE TABLE api_events_v2_queue (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `connector` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_flow` LowCardinality(String),
+ `api_auth_type` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `authentication_data` Nullable(String),
+ `status_code` UInt32,
+ `created_at` DateTime CODEC(T64, LZ4),
+ `latency` UInt128,
+ `user_agent` String,
+    `ip_addr` String
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-api-log-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+CREATE TABLE api_events_v2_dist (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `connector` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_flow` LowCardinality(String),
+ `api_auth_type` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `authentication_data` Nullable(String),
+ `status_code` UInt32,
+ `created_at` DateTime CODEC(T64, LZ4),
+ `inserted_at` DateTime CODEC(T64, LZ4),
+ `latency` UInt128,
+ `user_agent` String,
+ `ip_addr` String,
+ INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1,
+ INDEX apiIndex api_flow TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
+) ENGINE = MergeTree
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, flow_type, status_code, api_flow)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW api_events_v2_mv TO api_events_v2_dist (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `connector` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_flow` LowCardinality(String),
+ `api_auth_type` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `authentication_data` Nullable(String),
+ `status_code` UInt32,
+ `created_at` DateTime CODEC(T64, LZ4),
+ `inserted_at` DateTime CODEC(T64, LZ4),
+ `latency` UInt128,
+ `user_agent` String,
+ `ip_addr` String
+) AS
+SELECT
+ merchant_id,
+ payment_id,
+ refund_id,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ customer_id,
+ user_id,
+ connector,
+ request_id,
+ flow_type,
+ api_flow,
+ api_auth_type,
+ request,
+ response,
+ authentication_data,
+ status_code,
+ created_at,
+ now() as inserted_at,
+ latency,
+ user_agent,
+ ip_addr
+FROM
+ api_events_v2_queue
+WHERE length(_error) = 0;
+
+
+CREATE MATERIALIZED VIEW api_events_parse_errors
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM api_events_v2_queue
+WHERE length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql b/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql
new file mode 100644
index 000000000000..276e311e57a9
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql
@@ -0,0 +1,156 @@
+CREATE TABLE payment_attempts_queue (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` LowCardinality(Nullable(String)),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-attempt-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+CREATE TABLE payment_attempt_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
+ INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = CollapsingMergeTree(
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, attempt_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+
+CREATE MATERIALIZED VIEW kafka_parse_pa TO payment_attempt_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime64(3),
+ `capture_on` Nullable(DateTime64(3)),
+ `last_synced` Nullable(DateTime64(3)),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ attempt_id,
+ status,
+ amount,
+ currency,
+ connector,
+ save_to_locker,
+ error_message,
+ offer_amount,
+ surcharge_amount,
+ tax_amount,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ connector_transaction_id,
+ capture_method,
+ confirm,
+ authentication_type,
+ cancellation_reason,
+ amount_to_capture,
+ mandate_id,
+ browser_info,
+ error_code,
+ connector_metadata,
+ payment_experience,
+ created_at,
+ capture_on,
+ last_synced,
+ modified_at,
+ now() as inserted_at,
+ sign_flag
+FROM payment_attempts_queue
+WHERE length(_error) = 0;
+
diff --git a/crates/analytics/docs/clickhouse/scripts/payment_intents.sql b/crates/analytics/docs/clickhouse/scripts/payment_intents.sql
new file mode 100644
index 000000000000..8cd487f364b4
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/payment_intents.sql
@@ -0,0 +1,116 @@
+CREATE TABLE payment_intents_queue (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` String,
+ `business_label` String,
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-intent-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+CREATE TABLE payment_intents_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = CollapsingMergeTree(
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, payment_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW kafka_parse_payment_intent TO payment_intents_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `last_synced` Nullable(DateTime64(3)),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ status,
+ amount,
+ currency,
+ amount_captured,
+ customer_id,
+ description,
+ return_url,
+ connector_id,
+ statement_descriptor_name,
+ statement_descriptor_suffix,
+ setup_future_usage,
+ off_session,
+ client_secret,
+ active_attempt_id,
+ business_country,
+ business_label,
+ modified_at,
+ created_at,
+ last_synced,
+ now() as inserted_at,
+ sign_flag
+FROM payment_intents_queue WHERE length(_error) = 0;
diff --git a/crates/analytics/docs/clickhouse/scripts/refunds.sql b/crates/analytics/docs/clickhouse/scripts/refunds.sql
new file mode 100644
index 000000000000..a131270c1326
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/refunds.sql
@@ -0,0 +1,121 @@
+-- Kafka-engine source table: consumes refund events from the
+-- `hyperswitch-refund-events` topic as JSONEachRow. Rows are not
+-- persisted here; the materialized view below reads from this table
+-- and writes into `refund_dist`.
+CREATE TABLE refund_queue (
+    `internal_reference_id` String,
+    `refund_id` String,
+    `payment_id` String,
+    `merchant_id` String,
+    `connector_transaction_id` String,
+    `connector` LowCardinality(Nullable(String)),
+    `connector_refund_id` Nullable(String),
+    `external_reference_id` Nullable(String),
+    `refund_type` LowCardinality(String),
+    `total_amount` Nullable(UInt32),
+    `currency` LowCardinality(String),
+    `refund_amount` Nullable(UInt32),
+    `refund_status` LowCardinality(String),
+    `sent_to_gateway` Bool,
+    `refund_error_message` Nullable(String),
+    `refund_arn` Nullable(String),
+    `attempt_id` String,
+    `description` Nullable(String),
+    `refund_reason` Nullable(String),
+    `refund_error_code` Nullable(String),
+    `created_at` DateTime CODEC(T64, LZ4),
+    `modified_at` DateTime CODEC(T64, LZ4),
+    `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-refund-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+-- Storage table for refund events. The CollapsingMergeTree engine
+-- collapses pairs of rows that differ only in `sign_flag` during merges,
+-- which lets updates be modelled as (cancel, insert) row pairs.
+-- Data is partitioned per day and expires after six months via TTL.
+CREATE TABLE refund_dist (
+    `internal_reference_id` String,
+    `refund_id` String,
+    `payment_id` String,
+    `merchant_id` String,
+    `connector_transaction_id` String,
+    `connector` LowCardinality(Nullable(String)),
+    `connector_refund_id` Nullable(String),
+    `external_reference_id` Nullable(String),
+    `refund_type` LowCardinality(String),
+    `total_amount` Nullable(UInt32),
+    `currency` LowCardinality(String),
+    `refund_amount` Nullable(UInt32),
+    `refund_status` LowCardinality(String),
+    `sent_to_gateway` Bool,
+    `refund_error_message` Nullable(String),
+    `refund_arn` Nullable(String),
+    `attempt_id` String,
+    `description` Nullable(String),
+    `refund_reason` Nullable(String),
+    `refund_error_code` Nullable(String),
+    `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+    `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+    `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+    `sign_flag` Int8,
+    INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+    INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1,
+    INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+    INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1
+) ENGINE = CollapsingMergeTree(
+    sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+    (created_at, merchant_id, refund_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+-- Materialized view: streams parsed rows from the Kafka-backed
+-- `refund_queue` table into `refund_dist`, stamping `inserted_at`
+-- with now() at ingestion time; all other columns pass through.
+CREATE MATERIALIZED VIEW kafka_parse_refund TO refund_dist (
+    `internal_reference_id` String,
+    `refund_id` String,
+    `payment_id` String,
+    `merchant_id` String,
+    `connector_transaction_id` String,
+    `connector` LowCardinality(Nullable(String)),
+    `connector_refund_id` Nullable(String),
+    `external_reference_id` Nullable(String),
+    `refund_type` LowCardinality(String),
+    `total_amount` Nullable(UInt32),
+    `currency` LowCardinality(String),
+    `refund_amount` Nullable(UInt32),
+    `refund_status` LowCardinality(String),
+    `sent_to_gateway` Bool,
+    `refund_error_message` Nullable(String),
+    `refund_arn` Nullable(String),
+    `attempt_id` String,
+    `description` Nullable(String),
+    `refund_reason` Nullable(String),
+    `refund_error_code` Nullable(String),
+    `created_at` DateTime64(3),
+    `modified_at` DateTime64(3),
+    `inserted_at` DateTime64(3),
+    `sign_flag` Int8
+) AS
+SELECT
+    internal_reference_id,
+    refund_id,
+    payment_id,
+    merchant_id,
+    connector_transaction_id,
+    connector,
+    connector_refund_id,
+    external_reference_id,
+    refund_type,
+    total_amount,
+    currency,
+    refund_amount,
+    refund_status,
+    sent_to_gateway,
+    refund_error_message,
+    refund_arn,
+    attempt_id,
+    description,
+    refund_reason,
+    refund_error_code,
+    created_at,
+    modified_at,
+    now() as inserted_at,
+    sign_flag
+FROM refund_queue;
diff --git a/crates/analytics/src/api_event.rs b/crates/analytics/src/api_event.rs
new file mode 100644
index 000000000000..113344d47254
--- /dev/null
+++ b/crates/analytics/src/api_event.rs
@@ -0,0 +1,9 @@
+//! Analytics for API events.
+//!
+//! `core` holds the provider-dispatching entry points (re-exported
+//! below); `events`, `filters`, `metrics` and `types` hold the query
+//! plumbing used to build and run the underlying queries.
+mod core;
+pub mod events;
+pub mod filters;
+pub mod metrics;
+pub mod types;
+
+// Umbrella trait for data sources that can serve API-event queries.
+pub trait APIEventAnalytics: events::ApiLogsFilterAnalytics {}
+
+pub use self::core::{api_events_core, get_api_event_metrics, get_filters};
diff --git a/crates/analytics/src/api_event/core.rs b/crates/analytics/src/api_event/core.rs
new file mode 100644
index 000000000000..b368d6374f75
--- /dev/null
+++ b/crates/analytics/src/api_event/core.rs
@@ -0,0 +1,176 @@
+use std::collections::HashMap;
+
+use api_models::analytics::{
+ api_event::{
+ ApiEventMetricsBucketIdentifier, ApiEventMetricsBucketValue, ApiLogsRequest,
+ ApiMetricsBucketResponse,
+ },
+ AnalyticsMetadata, ApiEventFiltersResponse, GetApiEventFiltersRequest,
+ GetApiEventMetricRequest, MetricsResponse,
+};
+use error_stack::{IntoReport, ResultExt};
+use router_env::{
+ instrument, logger,
+ tracing::{self, Instrument},
+};
+
+use super::{
+ events::{get_api_event, ApiLogsResult},
+ metrics::ApiEventMetricRow,
+};
+use crate::{
+ errors::{AnalyticsError, AnalyticsResult},
+ metrics,
+ types::FiltersError,
+ AnalyticsProvider,
+};
+
+/// Fetches raw API log events for the given merchant.
+///
+/// The return type's generic parameters were lost in extraction and are
+/// restored here: the Clickhouse query yields a list of [`ApiLogsResult`]
+/// rows. SQL (sqlx) analytics does not implement API events, so the
+/// sqlx provider returns `NotImplemented`; combined providers are served
+/// from their Clickhouse half.
+#[instrument(skip_all)]
+pub async fn api_events_core(
+    pool: &AnalyticsProvider,
+    req: ApiLogsRequest,
+    merchant_id: String,
+) -> AnalyticsResult<Vec<ApiLogsResult>> {
+    let data = match pool {
+        AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented)
+            .into_report()
+            .attach_printable("SQL Analytics is not implemented for API Events"),
+        AnalyticsProvider::Clickhouse(pool) => get_api_event(&merchant_id, req, pool).await,
+        AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
+        | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
+            get_api_event(&merchant_id, req, ckh_pool).await
+        }
+    }
+    .change_context(AnalyticsError::UnknownError)?;
+    Ok(data)
+}
+
+/// Returns the distinct values available for each requested API-event
+/// dimension (status code, flow type, api flow) for this merchant,
+/// within the requested time range.
+///
+/// Restores the stripped generics: the function returns an
+/// `ApiEventFiltersResponse` and collects dimension values into a
+/// `Vec<String>`.
+pub async fn get_filters(
+    pool: &AnalyticsProvider,
+    req: GetApiEventFiltersRequest,
+    merchant_id: String,
+) -> AnalyticsResult<ApiEventFiltersResponse> {
+    use api_models::analytics::{api_event::ApiEventDimensions, ApiEventFilterValue};
+
+    use super::filters::get_api_event_filter_for_dimension;
+    use crate::api_event::filters::ApiEventFilter;
+
+    let mut res = ApiEventFiltersResponse::default();
+    for dim in req.group_by_names {
+        let values = match pool {
+            // Filter discovery is Clickhouse-only; sqlx has no API-event tables.
+            AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented)
+                .into_report()
+                .attach_printable("SQL Analytics is not implemented for API Events"),
+            AnalyticsProvider::Clickhouse(ckh_pool)
+            | AnalyticsProvider::CombinedSqlx(_, ckh_pool)
+            | AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
+                get_api_event_filter_for_dimension(dim, &merchant_id, &req.time_range, ckh_pool)
+                    .await
+            }
+        }
+        .change_context(AnalyticsError::UnknownError)?
+        .into_iter()
+        // Project each row onto the string value of the dimension being listed.
+        .filter_map(|fil: ApiEventFilter| match dim {
+            ApiEventDimensions::StatusCode => fil.status_code.map(|i| i.to_string()),
+            ApiEventDimensions::FlowType => fil.flow_type,
+            ApiEventDimensions::ApiFlow => fil.api_flow,
+        })
+        .collect::<Vec<String>>();
+        res.query_data.push(ApiEventFilterValue {
+            dimension: dim,
+            values,
+        })
+    }
+
+    Ok(res)
+}
+
+/// Runs every requested API-event metric concurrently (one task per
+/// metric) and merges the per-metric rows into a single bucketed
+/// response.
+///
+/// Restores the stripped generics on the accumulator map, the return
+/// type and the final `query_data` vector.
+#[instrument(skip_all)]
+pub async fn get_api_event_metrics(
+    pool: &AnalyticsProvider,
+    merchant_id: &str,
+    req: GetApiEventMetricRequest,
+) -> AnalyticsResult<MetricsResponse<ApiMetricsBucketResponse>> {
+    let mut metrics_accumulator: HashMap<ApiEventMetricsBucketIdentifier, ApiEventMetricRow> =
+        HashMap::new();
+
+    let mut set = tokio::task::JoinSet::new();
+    for metric_type in req.metrics.iter().cloned() {
+        let req = req.clone();
+        let pool = pool.clone();
+        let task_span = tracing::debug_span!(
+            "analytics_api_metrics_query",
+            api_event_metric = metric_type.as_ref()
+        );
+
+        // TODO: lifetime issues with joinset,
+        // can be optimized away if joinset lifetime requirements are relaxed
+        let merchant_id_scoped = merchant_id.to_owned();
+        set.spawn(
+            async move {
+                let data = pool
+                    .get_api_event_metrics(
+                        &metric_type,
+                        &req.group_by_names.clone(),
+                        &merchant_id_scoped,
+                        &req.filters,
+                        &req.time_series.map(|t| t.granularity),
+                        &req.time_range,
+                    )
+                    .await
+                    .change_context(AnalyticsError::UnknownError);
+                (metric_type, data)
+            }
+            .instrument(task_span),
+        );
+    }
+
+    while let Some((metric, data)) = set
+        .join_next()
+        .await
+        .transpose()
+        .into_report()
+        .change_context(AnalyticsError::UnknownError)?
+    {
+        let data = data?;
+        let attributes = &[
+            metrics::request::add_attributes("metric_type", metric.to_string()),
+            metrics::request::add_attributes("source", pool.to_string()),
+        ];
+
+        let value = u64::try_from(data.len());
+        if let Ok(val) = value {
+            metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes);
+            logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
+        }
+        // Merge rows for the same bucket; the first metric to fill a
+        // field wins (`Option::or` keeps the existing value).
+        for (id, value) in data {
+            metrics_accumulator
+                .entry(id)
+                .and_modify(|data| {
+                    data.api_count = data.api_count.or(value.api_count);
+                    data.status_code_count = data.status_code_count.or(value.status_code_count);
+                    data.latency = data.latency.or(value.latency);
+                })
+                .or_insert(value);
+        }
+    }
+
+    let query_data: Vec<ApiMetricsBucketResponse> = metrics_accumulator
+        .into_iter()
+        .map(|(id, val)| ApiMetricsBucketResponse {
+            values: ApiEventMetricsBucketValue {
+                latency: val.latency,
+                api_count: val.api_count,
+                status_code_count: val.status_code_count,
+            },
+            dimensions: id,
+        })
+        .collect();
+
+    Ok(MetricsResponse {
+        query_data,
+        meta_data: [AnalyticsMetadata {
+            current_time_range: req.time_range,
+        }],
+    })
+}
diff --git a/crates/analytics/src/api_event/events.rs b/crates/analytics/src/api_event/events.rs
new file mode 100644
index 000000000000..73b3fb9cbad2
--- /dev/null
+++ b/crates/analytics/src/api_event/events.rs
@@ -0,0 +1,105 @@
+use api_models::analytics::{
+ api_event::{ApiLogsRequest, QueryType},
+ Granularity,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use router_env::Flow;
+use time::PrimitiveDateTime;
+
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
+};
+/// Data sources able to deserialize raw API log rows. Restores the
+/// stripped `LoadRow` type parameter.
+pub trait ApiLogsFilterAnalytics: LoadRow<ApiLogsResult> {}
+
+/// Queries raw API log rows for a merchant, scoped to either a payment
+/// or a refund, optionally restricted to a caller-supplied set of API
+/// flows (otherwise a default set of payment/refund flows is used).
+///
+/// Restores the stripped generics: the `<T>` type parameter, the
+/// `ToSql<T>`/`GroupByClause<T>` bounds, `QueryBuilder<T>` and the
+/// `execute_query` turbofish.
+pub async fn get_api_event<T>(
+    merchant_id: &String,
+    query_param: ApiLogsRequest,
+    pool: &T,
+) -> FiltersResult<Vec<ApiLogsResult>>
+where
+    T: AnalyticsDataSource + ApiLogsFilterAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+    query_builder.add_select_column("*").switch()?;
+
+    query_builder
+        .add_filter_clause("merchant_id", merchant_id)
+        .switch()?;
+    match query_param.query_param {
+        QueryType::Payment { payment_id } => query_builder
+            .add_filter_clause("payment_id", payment_id)
+            .switch()?,
+        QueryType::Refund {
+            payment_id,
+            refund_id,
+        } => {
+            query_builder
+                .add_filter_clause("payment_id", payment_id)
+                .switch()?;
+            query_builder
+                .add_filter_clause("refund_id", refund_id)
+                .switch()?;
+        }
+    }
+    if let Some(list_api_name) = query_param.api_name_filter {
+        query_builder
+            .add_filter_in_range_clause("api_flow", &list_api_name)
+            .switch()?;
+    } else {
+        // Default flow set when the caller does not filter explicitly.
+        query_builder
+            .add_filter_in_range_clause(
+                "api_flow",
+                &[
+                    Flow::PaymentsCancel,
+                    Flow::PaymentsCapture,
+                    Flow::PaymentsConfirm,
+                    Flow::PaymentsCreate,
+                    Flow::PaymentsStart,
+                    Flow::PaymentsUpdate,
+                    Flow::RefundsCreate,
+                    Flow::IncomingWebhookReceive,
+                ],
+            )
+            .switch()?;
+    }
+    //TODO!: update the execute_query function to return reports instead of plain errors...
+    query_builder
+        .execute_query::<ApiLogsResult, _>(pool)
+        .await
+        .change_context(FiltersError::QueryBuildingError)?
+        .change_context(FiltersError::QueryExecutionFailure)
+}
+/// One raw API log row as stored in the Clickhouse `api_events` table.
+/// Restores the stripped `Option<...>` field types; string-valued
+/// fields follow from their use as text columns upstream.
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+pub struct ApiLogsResult {
+    pub merchant_id: String,
+    pub payment_id: Option<String>,
+    pub refund_id: Option<String>,
+    pub payment_method_id: Option<String>,
+    pub payment_method: Option<String>,
+    pub payment_method_type: Option<String>,
+    pub customer_id: Option<String>,
+    pub user_id: Option<String>,
+    pub connector: Option<String>,
+    pub request_id: Option<String>,
+    pub flow_type: String,
+    pub api_flow: String,
+    pub api_auth_type: Option<String>,
+    pub request: String,
+    pub response: Option<String>,
+    pub error: Option<String>,
+    pub authentication_data: Option<String>,
+    pub status_code: u16,
+    // assumes latency is recorded in integer time units (u128 upstream) — TODO confirm
+    pub latency: Option<u128>,
+    pub user_agent: Option<String>,
+    pub hs_latency: Option<u128>,
+    pub ip_addr: Option<String>,
+    #[serde(with = "common_utils::custom_serde::iso8601")]
+    pub created_at: PrimitiveDateTime,
+}
diff --git a/crates/analytics/src/api_event/filters.rs b/crates/analytics/src/api_event/filters.rs
new file mode 100644
index 000000000000..87414ebad4ba
--- /dev/null
+++ b/crates/analytics/src/api_event/filters.rs
@@ -0,0 +1,53 @@
+use api_models::analytics::{api_event::ApiEventDimensions, Granularity, TimeRange};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
+};
+
+/// Data sources able to deserialize API-event filter rows. Restores the
+/// stripped `LoadRow` type parameter.
+pub trait ApiEventFilterAnalytics: LoadRow<ApiEventFilter> {}
+
+/// Returns the distinct values of `dimension` seen for this merchant
+/// within `time_range` (used to populate filter drop-downs).
+///
+/// Restores the stripped generics on the signature, bounds, builder
+/// and `execute_query` turbofish.
+pub async fn get_api_event_filter_for_dimension<T>(
+    dimension: ApiEventDimensions,
+    merchant_id: &String,
+    time_range: &TimeRange,
+    pool: &T,
+) -> FiltersResult<Vec<ApiEventFilter>>
+where
+    T: AnalyticsDataSource + ApiEventFilterAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+    query_builder.add_select_column(dimension).switch()?;
+    time_range
+        .set_filter_clause(&mut query_builder)
+        .attach_printable("Error filtering time range")
+        .switch()?;
+
+    query_builder
+        .add_filter_clause("merchant_id", merchant_id)
+        .switch()?;
+
+    // DISTINCT so each dimension value appears once.
+    query_builder.set_distinct();
+
+    query_builder
+        .execute_query::<ApiEventFilter, _>(pool)
+        .await
+        .change_context(FiltersError::QueryBuildingError)?
+        .change_context(FiltersError::QueryExecutionFailure)
+}
+
+/// One distinct-dimension row returned by filter discovery. Restores
+/// the stripped field types; `status_code` is numeric (callers call
+/// `.map(|i| i.to_string())` on it).
+#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
+pub struct ApiEventFilter {
+    pub status_code: Option<i32>,
+    pub flow_type: Option<String>,
+    pub api_flow: Option<String>,
+}
diff --git a/crates/analytics/src/api_event/metrics.rs b/crates/analytics/src/api_event/metrics.rs
new file mode 100644
index 000000000000..16f2d7a2f5ab
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics.rs
@@ -0,0 +1,110 @@
+use api_models::analytics::{
+ api_event::{
+ ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
+ },
+ Granularity, TimeRange,
+};
+use time::PrimitiveDateTime;
+
+use crate::{
+ query::{Aggregate, GroupByClause, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
+};
+
+mod api_count;
+pub mod latency;
+mod status_code_count;
+use api_count::ApiCount;
+use latency::MaxLatency;
+use status_code_count::StatusCodeCount;
+
+use self::latency::LatencyAvg;
+
+/// One result row shared by all API-event metric queries; aggregates
+/// not selected by a given metric deserialize as `None`. Restores the
+/// stripped `Option<...>` types.
+#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+pub struct ApiEventMetricRow {
+    pub latency: Option<u64>,
+    pub api_count: Option<u64>,
+    pub status_code_count: Option<u64>,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub start_bucket: Option<PrimitiveDateTime>,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub end_bucket: Option<PrimitiveDateTime>,
+}
+
+/// Data sources able to deserialize both metric row shapes used by the
+/// API-event metrics. Restores the two stripped `LoadRow` parameters.
+pub trait ApiEventMetricAnalytics: LoadRow<ApiEventMetricRow> + LoadRow<LatencyAvg> {}
+
+/// Interface implemented by each API-event metric; loads rows keyed by
+/// [`ApiEventMetricsBucketIdentifier`]. Restores the stripped `<T>`
+/// parameter, the `Option<Granularity>` argument and the return type.
+#[async_trait::async_trait]
+pub trait ApiEventMetric<T>
+where
+    T: AnalyticsDataSource + ApiEventMetricAnalytics,
+{
+    async fn load_metrics(
+        &self,
+        dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>>;
+}
+
+#[async_trait::async_trait]
+impl<T> ApiEventMetric<T> for ApiEventMetrics
+where
+    T: AnalyticsDataSource + ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    /// Dispatches to the concrete implementation for this metric variant.
+    async fn load_metrics(
+        &self,
+        dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        match self {
+            Self::Latency => {
+                MaxLatency
+                    .load_metrics(
+                        dimensions,
+                        merchant_id,
+                        filters,
+                        granularity,
+                        time_range,
+                        pool,
+                    )
+                    .await
+            }
+            Self::ApiCount => {
+                ApiCount
+                    .load_metrics(
+                        dimensions,
+                        merchant_id,
+                        filters,
+                        granularity,
+                        time_range,
+                        pool,
+                    )
+                    .await
+            }
+            Self::StatusCodeCount => {
+                StatusCodeCount
+                    .load_metrics(
+                        dimensions,
+                        merchant_id,
+                        filters,
+                        granularity,
+                        time_range,
+                        pool,
+                    )
+                    .await
+            }
+        }
+    }
+}
diff --git a/crates/analytics/src/api_event/metrics/api_count.rs b/crates/analytics/src/api_event/metrics/api_count.rs
new file mode 100644
index 000000000000..7f5f291aa53e
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/api_count.rs
@@ -0,0 +1,106 @@
+use api_models::analytics::{
+ api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+ Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+/// Metric: number of API events per time bucket, optionally filtered
+/// by flow type.
+#[derive(Default)]
+pub(super) struct ApiCount;
+
+#[async_trait::async_trait]
+impl<T> super::ApiEventMetric<T> for ApiCount
+where
+    T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        _dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: None,
+                alias: Some("api_count"),
+            })
+            .switch()?;
+        if !filters.flow_type.is_empty() {
+            query_builder
+                .add_filter_in_range_clause(ApiEventDimensions::FlowType, &filters.flow_type)
+                .attach_printable("Error adding flow_type filter")
+                .switch()?;
+        }
+        // MIN/MAX of created_at give the actual bounds of each bucket.
+        query_builder
+            .add_select_column(Aggregate::Min {
+                field: "created_at",
+                alias: Some("start_bucket"),
+            })
+            .switch()?;
+        query_builder
+            .add_select_column(Aggregate::Max {
+                field: "created_at",
+                alias: Some("end_bucket"),
+            })
+            .switch()?;
+        if let Some(granularity) = granularity.as_ref() {
+            granularity
+                .set_group_by_clause(&mut query_builder)
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        query_builder
+            .add_filter_clause("merchant_id", merchant_id)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
+        query_builder
+            .execute_query::<ApiEventMetricRow, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    // Clip bucket edges to the granularity; fall back to the
+                    // requested range when no granularity was supplied.
+                    ApiEventMetricsBucketIdentifier::new(TimeRange {
+                        start_time: match (granularity, i.start_bucket) {
+                            (Some(g), Some(st)) => g.clip_to_start(st)?,
+                            _ => time_range.start_time,
+                        },
+                        end_time: granularity.as_ref().map_or_else(
+                            || Ok(time_range.end_time),
+                            |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+                        )?,
+                    }),
+                    i,
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
diff --git a/crates/analytics/src/api_event/metrics/latency.rs b/crates/analytics/src/api_event/metrics/latency.rs
new file mode 100644
index 000000000000..379b39fbeb9e
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/latency.rs
@@ -0,0 +1,138 @@
+use api_models::analytics::{
+ api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+ Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+ query::{
+ Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
+ Window,
+ },
+ types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+/// Metric: average request latency per bucket, computed client-side
+/// from a SUM/COUNT pair fetched via [`LatencyAvg`].
+#[derive(Default)]
+pub(super) struct MaxLatency;
+
+#[async_trait::async_trait]
+impl<T> super::ApiEventMetric<T> for MaxLatency
+where
+    T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        _dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+        query_builder
+            .add_select_column(Aggregate::Sum {
+                field: "latency",
+                alias: Some("latency_sum"),
+            })
+            .switch()?;
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: Some("latency"),
+                alias: Some("latency_count"),
+            })
+            .switch()?;
+
+        query_builder
+            .add_select_column(Aggregate::Min {
+                field: "created_at",
+                alias: Some("start_bucket"),
+            })
+            .switch()?;
+        query_builder
+            .add_select_column(Aggregate::Max {
+                field: "created_at",
+                alias: Some("end_bucket"),
+            })
+            .switch()?;
+        if let Some(granularity) = granularity.as_ref() {
+            granularity
+                .set_group_by_clause(&mut query_builder)
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        filters.set_filter_clause(&mut query_builder).switch()?;
+
+        query_builder
+            .add_filter_clause("merchant_id", merchant_id)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
+        // Drop requests whose body mentions the locker IP so vault
+        // traffic does not skew latency numbers.
+        query_builder
+            .add_custom_filter_clause("request", "10.63.134.6", FilterTypes::NotLike)
+            .attach_printable("Error filtering out locker IP")
+            .switch()?;
+
+        query_builder
+            .execute_query::<LatencyAvg, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    ApiEventMetricsBucketIdentifier::new(TimeRange {
+                        start_time: match (granularity, i.start_bucket) {
+                            (Some(g), Some(st)) => g.clip_to_start(st)?,
+                            _ => time_range.start_time,
+                        },
+                        end_time: granularity.as_ref().map_or_else(
+                            || Ok(time_range.end_time),
+                            |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+                        )?,
+                    }),
+                    ApiEventMetricRow {
+                        // Integer average; None when no rows carried a latency.
+                        latency: if i.latency_count != 0 {
+                            Some(i.latency_sum.unwrap_or(0) / i.latency_count)
+                        } else {
+                            None
+                        },
+                        api_count: None,
+                        status_code_count: None,
+                        start_bucket: i.start_bucket,
+                        end_bucket: i.end_bucket,
+                    },
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
+
+/// Intermediate row for the latency metric: the SUM and COUNT are
+/// fetched separately so the average can be computed after grouping.
+/// Restores the stripped numeric/timestamp field types.
+#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+pub struct LatencyAvg {
+    latency_sum: Option<u64>,
+    latency_count: u64,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub start_bucket: Option<PrimitiveDateTime>,
+    #[serde(with = "common_utils::custom_serde::iso8601::option")]
+    pub end_bucket: Option<PrimitiveDateTime>,
+}
diff --git a/crates/analytics/src/api_event/metrics/status_code_count.rs b/crates/analytics/src/api_event/metrics/status_code_count.rs
new file mode 100644
index 000000000000..5c652fd8e0c9
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/status_code_count.rs
@@ -0,0 +1,103 @@
+use api_models::analytics::{
+ api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+ Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+/// Metric: count of rows per status code bucket.
+#[derive(Default)]
+pub(super) struct StatusCodeCount;
+
+#[async_trait::async_trait]
+impl<T> super::ApiEventMetric<T> for StatusCodeCount
+where
+    T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+    PrimitiveDateTime: ToSql<T>,
+    AnalyticsCollection: ToSql<T>,
+    Granularity: GroupByClause<T>,
+    Aggregate<&'static str>: ToSql<T>,
+    Window<&'static str>: ToSql<T>,
+{
+    async fn load_metrics(
+        &self,
+        _dimensions: &[ApiEventDimensions],
+        merchant_id: &str,
+        filters: &ApiEventFilters,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+        pool: &T,
+    ) -> MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+        query_builder
+            .add_select_column(Aggregate::Count {
+                field: Some("status_code"),
+                alias: Some("status_code_count"),
+            })
+            .switch()?;
+
+        filters.set_filter_clause(&mut query_builder).switch()?;
+
+        query_builder
+            .add_filter_clause("merchant_id", merchant_id)
+            .switch()?;
+
+        time_range
+            .set_filter_clause(&mut query_builder)
+            .attach_printable("Error filtering time range")
+            .switch()?;
+
+        query_builder
+            .add_select_column(Aggregate::Min {
+                field: "created_at",
+                alias: Some("start_bucket"),
+            })
+            .switch()?;
+        query_builder
+            .add_select_column(Aggregate::Max {
+                field: "created_at",
+                alias: Some("end_bucket"),
+            })
+            .switch()?;
+        if let Some(granularity) = granularity.as_ref() {
+            granularity
+                .set_group_by_clause(&mut query_builder)
+                .attach_printable("Error adding granularity")
+                .switch()?;
+        }
+
+        query_builder
+            .execute_query::<ApiEventMetricRow, _>(pool)
+            .await
+            .change_context(MetricsError::QueryBuildingError)?
+            .change_context(MetricsError::QueryExecutionFailure)?
+            .into_iter()
+            .map(|i| {
+                Ok((
+                    ApiEventMetricsBucketIdentifier::new(TimeRange {
+                        start_time: match (granularity, i.start_bucket) {
+                            (Some(g), Some(st)) => g.clip_to_start(st)?,
+                            _ => time_range.start_time,
+                        },
+                        end_time: granularity.as_ref().map_or_else(
+                            || Ok(time_range.end_time),
+                            |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+                        )?,
+                    }),
+                    i,
+                ))
+            })
+            .collect::<error_stack::Result<
+                Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>,
+                crate::query::PostProcessingError,
+            >>()
+            .change_context(MetricsError::PostProcessingFailure)
+    }
+}
diff --git a/crates/analytics/src/api_event/types.rs b/crates/analytics/src/api_event/types.rs
new file mode 100644
index 000000000000..72205fc72abf
--- /dev/null
+++ b/crates/analytics/src/api_event/types.rs
@@ -0,0 +1,33 @@
+use api_models::analytics::api_event::{ApiEventDimensions, ApiEventFilters};
+use error_stack::ResultExt;
+
+use crate::{
+ query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
+ types::{AnalyticsCollection, AnalyticsDataSource},
+};
+
+/// Translates the user-supplied API-event filters into `IN (...)`
+/// clauses; empty filter lists add no clause. Restores the stripped
+/// `impl<T> QueryFilter<T>` generics and `QueryBuilder<T>` parameter.
+impl<T> QueryFilter<T> for ApiEventFilters
+where
+    T: AnalyticsDataSource,
+    AnalyticsCollection: ToSql<T>,
+{
+    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
+        if !self.status_code.is_empty() {
+            builder
+                .add_filter_in_range_clause(ApiEventDimensions::StatusCode, &self.status_code)
+                .attach_printable("Error adding status_code filter")?;
+        }
+        if !self.flow_type.is_empty() {
+            builder
+                .add_filter_in_range_clause(ApiEventDimensions::FlowType, &self.flow_type)
+                .attach_printable("Error adding flow_type filter")?;
+        }
+        if !self.api_flow.is_empty() {
+            builder
+                .add_filter_in_range_clause(ApiEventDimensions::ApiFlow, &self.api_flow)
+                .attach_printable("Error adding api_name filter")?;
+        }
+
+        Ok(())
+    }
+}
diff --git a/crates/analytics/src/clickhouse.rs b/crates/analytics/src/clickhouse.rs
new file mode 100644
index 000000000000..964486c93649
--- /dev/null
+++ b/crates/analytics/src/clickhouse.rs
@@ -0,0 +1,458 @@
+use std::sync::Arc;
+
+use actix_web::http::StatusCode;
+use common_utils::errors::ParsingError;
+use error_stack::{IntoReport, Report, ResultExt};
+use router_env::logger;
+use time::PrimitiveDateTime;
+
+use super::{
+ payments::{
+ distribution::PaymentDistributionRow, filters::FilterRow, metrics::PaymentMetricRow,
+ },
+ query::{Aggregate, ToSql, Window},
+ refunds::{filters::RefundFilterRow, metrics::RefundMetricRow},
+ sdk_events::{filters::SdkEventFilter, metrics::SdkEventMetricRow},
+ types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, QueryExecutionError},
+};
+use crate::{
+ api_event::{
+ events::ApiLogsResult,
+ filters::ApiEventFilter,
+ metrics::{latency::LatencyAvg, ApiEventMetricRow},
+ },
+ sdk_events::events::SdkEventsResult,
+ types::TableEngine,
+};
+
+pub type ClickhouseResult<T> = error_stack::Result<T, ClickhouseError>;
+
+#[derive(Clone, Debug)]
+pub struct ClickhouseClient {
+ pub config: Arc<ClickhouseConfig>,
+}
+
+#[derive(Clone, Debug, serde::Deserialize)]
+pub struct ClickhouseConfig {
+ username: String,
+ password: Option<String>,
+ host: String,
+ database_name: String,
+}
+
+impl Default for ClickhouseConfig {
+ fn default() -> Self {
+ Self {
+ username: "default".to_string(),
+ password: None,
+ host: "http://localhost:8123".to_string(),
+ database_name: "default".to_string(),
+ }
+ }
+}
+
+impl ClickhouseClient {
+ async fn execute_query(&self, query: &str) -> ClickhouseResult<Vec<serde_json::Value>> {
+ logger::debug!("Executing query: {query}");
+ let client = reqwest::Client::new();
+ let params = CkhQuery {
+ date_time_output_format: String::from("iso"),
+ output_format_json_quote_64bit_integers: 0,
+ database: self.config.database_name.clone(),
+ };
+ let response = client
+ .post(&self.config.host)
+ .query(&params)
+ .basic_auth(self.config.username.clone(), self.config.password.clone())
+ .body(format!("{query}\nFORMAT JSON"))
+ .send()
+ .await
+ .into_report()
+ .change_context(ClickhouseError::ConnectionError)?;
+
+ logger::debug!(clickhouse_response=?response, query=?query, "Clickhouse response");
+ if response.status() != StatusCode::OK {
+ response.text().await.map_or_else(
+ |er| {
+ Err(ClickhouseError::ResponseError)
+ .into_report()
+ .attach_printable_lazy(|| format!("Error: {er:?}"))
+ },
+ |t| Err(ClickhouseError::ResponseNotOK(t)).into_report(),
+ )
+ } else {
+ Ok(response
+ .json::<CkhOutput<serde_json::Value>>()
+ .await
+ .into_report()
+ .change_context(ClickhouseError::ResponseError)?
+ .data)
+ }
+ }
+}
+
+#[async_trait::async_trait]
+impl AnalyticsDataSource for ClickhouseClient {
+ type Row = serde_json::Value;
+
+ async fn load_results<T>(
+ &self,
+ query: &str,
+ ) -> common_utils::errors::CustomResult<Vec<T>, QueryExecutionError>
+ where
+ Self: LoadRow<T>,
+ {
+ self.execute_query(query)
+ .await
+ .change_context(QueryExecutionError::DatabaseError)?
+ .into_iter()
+ .map(Self::load_row)
+ .collect::<Result<Vec<_>, _>>()
+ .change_context(QueryExecutionError::RowExtractionFailure)
+ }
+
+ fn get_table_engine(table: AnalyticsCollection) -> TableEngine {
+ match table {
+ AnalyticsCollection::Payment
+ | AnalyticsCollection::Refund
+ | AnalyticsCollection::PaymentIntent => {
+ TableEngine::CollapsingMergeTree { sign: "sign_flag" }
+ }
+ AnalyticsCollection::SdkEvents => TableEngine::BasicTree,
+ AnalyticsCollection::ApiEvents => TableEngine::BasicTree,
+ }
+ }
+}
+
+impl<T> LoadRow<T> for ClickhouseClient
+where
+ Self::Row: TryInto<T, Error = Report<ParsingError>>,
+{
+ fn load_row(row: Self::Row) -> common_utils::errors::CustomResult<T, QueryExecutionError> {
+ row.try_into()
+ .change_context(QueryExecutionError::RowExtractionFailure)
+ }
+}
+
+impl super::payments::filters::PaymentFilterAnalytics for ClickhouseClient {}
+impl super::payments::metrics::PaymentMetricAnalytics for ClickhouseClient {}
+impl super::payments::distribution::PaymentDistributionAnalytics for ClickhouseClient {}
+impl super::refunds::metrics::RefundMetricAnalytics for ClickhouseClient {}
+impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {}
+impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {}
+impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {}
+impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {}
+impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {}
+impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {}
+impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {}
+
+#[derive(Debug, serde::Serialize)]
+struct CkhQuery {
+ date_time_output_format: String,
+ output_format_json_quote_64bit_integers: u8,
+ database: String,
+}
+
+#[derive(Debug, serde::Deserialize)]
+struct CkhOutput<T> {
+ data: Vec<T>,
+}
+
+impl TryInto<ApiLogsResult> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<ApiLogsResult, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ApiLogsResult in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<SdkEventsResult> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<SdkEventsResult, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse SdkEventsResult in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<PaymentMetricRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<PaymentMetricRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse PaymentMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<PaymentDistributionRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<PaymentDistributionRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse PaymentDistributionRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<FilterRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<FilterRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse FilterRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<RefundMetricRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<RefundMetricRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse RefundMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<RefundFilterRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<RefundFilterRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse RefundFilterRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<ApiEventMetricRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<ApiEventMetricRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ApiEventMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<LatencyAvg> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<LatencyAvg, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse LatencyAvg in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<SdkEventMetricRow> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<SdkEventMetricRow, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse SdkEventMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<SdkEventFilter> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<SdkEventFilter, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse SdkEventFilter in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto<ApiEventFilter> for serde_json::Value {
+ type Error = Report<ParsingError>;
+
+ fn try_into(self) -> Result<ApiEventFilter, Self::Error> {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ApiEventFilter in clickhouse results",
+ ))
+ }
+}
+
+impl ToSql<ClickhouseClient> for PrimitiveDateTime {
+ fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
+ let format =
+ time::format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]")
+ .into_report()
+ .change_context(ParsingError::DateTimeParsingError)
+ .attach_printable("Failed to parse format description")?;
+ self.format(&format)
+ .into_report()
+ .change_context(ParsingError::EncodeError(
+ "failed to encode to clickhouse date-time format",
+ ))
+ .attach_printable("Failed to format date time")
+ }
+}
+
+impl ToSql<ClickhouseClient> for AnalyticsCollection {
+ fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
+ match self {
+ Self::Payment => Ok("payment_attempt_dist".to_string()),
+ Self::Refund => Ok("refund_dist".to_string()),
+ Self::SdkEvents => Ok("sdk_events_dist".to_string()),
+ Self::ApiEvents => Ok("api_audit_log".to_string()),
+ Self::PaymentIntent => Ok("payment_intents_dist".to_string()),
+ }
+ }
+}
+
+impl<T> ToSql<ClickhouseClient> for Aggregate<T>
+where
+ T: ToSql<ClickhouseClient>,
+{
+ fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
+ Ok(match self {
+ Self::Count { field: _, alias } => {
+ let query = match table_engine {
+ TableEngine::CollapsingMergeTree { sign } => format!("sum({sign})"),
+ TableEngine::BasicTree => "count(*)".to_string(),
+ };
+ format!(
+ "{query}{}",
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::Sum { field, alias } => {
+ let query = match table_engine {
+ TableEngine::CollapsingMergeTree { sign } => format!(
+ "sum({sign} * {})",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to sum aggregate")?
+ ),
+ TableEngine::BasicTree => format!(
+ "sum({})",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to sum aggregate")?
+ ),
+ };
+ format!(
+ "{query}{}",
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::Min { field, alias } => {
+ format!(
+ "min({}){}",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to min aggregate")?,
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::Max { field, alias } => {
+ format!(
+ "max({}){}",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to max aggregate")?,
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ })
+ }
+}
+
+impl<T> ToSql<ClickhouseClient> for Window<T>
+where
+ T: ToSql<ClickhouseClient>,
+{
+ fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result<String, ParsingError> {
+ Ok(match self {
+ Self::Sum {
+ field,
+ partition_by,
+ order_by,
+ alias,
+ } => {
+ format!(
+ "sum({}) over ({}{}){}",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to sum window")?,
+ partition_by.as_ref().map_or_else(
+ || "".to_owned(),
+ |partition_by| format!("partition by {}", partition_by.to_owned())
+ ),
+ order_by.as_ref().map_or_else(
+ || "".to_owned(),
+ |(order_column, order)| format!(
+ " order by {} {}",
+ order_column.to_owned(),
+ order.to_string()
+ )
+ ),
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::RowNumber {
+ field: _,
+ partition_by,
+ order_by,
+ alias,
+ } => {
+ format!(
+ "row_number() over ({}{}){}",
+ partition_by.as_ref().map_or_else(
+ || "".to_owned(),
+ |partition_by| format!("partition by {}", partition_by.to_owned())
+ ),
+ order_by.as_ref().map_or_else(
+ || "".to_owned(),
+ |(order_column, order)| format!(
+ " order by {} {}",
+ order_column.to_owned(),
+ order.to_string()
+ )
+ ),
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ })
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum ClickhouseError {
+ #[error("Clickhouse connection error")]
+ ConnectionError,
+ #[error("Clickhouse NON-200 response content: '{0}'")]
+ ResponseNotOK(String),
+ #[error("Clickhouse response error")]
+ ResponseError,
+}
diff --git a/crates/analytics/src/core.rs b/crates/analytics/src/core.rs
new file mode 100644
index 000000000000..354e1e2f1766
--- /dev/null
+++ b/crates/analytics/src/core.rs
@@ -0,0 +1,31 @@
+use api_models::analytics::GetInfoResponse;
+
+use crate::{types::AnalyticsDomain, utils};
+
+pub async fn get_domain_info(
+ domain: AnalyticsDomain,
+) -> crate::errors::AnalyticsResult<GetInfoResponse> {
+ let info = match domain {
+ AnalyticsDomain::Payments => GetInfoResponse {
+ metrics: utils::get_payment_metrics_info(),
+ download_dimensions: None,
+ dimensions: utils::get_payment_dimensions(),
+ },
+ AnalyticsDomain::Refunds => GetInfoResponse {
+ metrics: utils::get_refund_metrics_info(),
+ download_dimensions: None,
+ dimensions: utils::get_refund_dimensions(),
+ },
+ AnalyticsDomain::SdkEvents => GetInfoResponse {
+ metrics: utils::get_sdk_event_metrics_info(),
+ download_dimensions: None,
+ dimensions: utils::get_sdk_event_dimensions(),
+ },
+ AnalyticsDomain::ApiEvents => GetInfoResponse {
+ metrics: utils::get_api_event_metrics_info(),
+ download_dimensions: None,
+ dimensions: utils::get_api_event_dimensions(),
+ },
+ };
+ Ok(info)
+}
diff --git a/crates/router/src/analytics/errors.rs b/crates/analytics/src/errors.rs
similarity index 100%
rename from crates/router/src/analytics/errors.rs
rename to crates/analytics/src/errors.rs
diff --git a/crates/analytics/src/lambda_utils.rs b/crates/analytics/src/lambda_utils.rs
new file mode 100644
index 000000000000..f9446a402b4e
--- /dev/null
+++ b/crates/analytics/src/lambda_utils.rs
@@ -0,0 +1,36 @@
+use aws_config::{self, meta::region::RegionProviderChain};
+use aws_sdk_lambda::{config::Region, types::InvocationType::Event, Client};
+use aws_smithy_types::Blob;
+use common_utils::errors::CustomResult;
+use error_stack::{IntoReport, ResultExt};
+
+use crate::errors::AnalyticsError;
+
+async fn get_aws_client(region: String) -> Client {
+ let region_provider = RegionProviderChain::first_try(Region::new(region));
+ let sdk_config = aws_config::from_env().region(region_provider).load().await;
+ Client::new(&sdk_config)
+}
+
+pub async fn invoke_lambda(
+ function_name: &str,
+ region: &str,
+ json_bytes: &[u8],
+) -> CustomResult<(), AnalyticsError> {
+ get_aws_client(region.to_string())
+ .await
+ .invoke()
+ .function_name(function_name)
+ .invocation_type(Event)
+ .payload(Blob::new(json_bytes.to_owned()))
+ .send()
+ .await
+ .into_report()
+ .map_err(|er| {
+ let er_rep = format!("{er:?}");
+ er.attach_printable(er_rep)
+ })
+ .change_context(AnalyticsError::UnknownError)
+ .attach_printable("Lambda invocation failed")?;
+ Ok(())
+}
diff --git a/crates/analytics/src/lib.rs b/crates/analytics/src/lib.rs
new file mode 100644
index 000000000000..24da77f84f2b
--- /dev/null
+++ b/crates/analytics/src/lib.rs
@@ -0,0 +1,509 @@
+mod clickhouse;
+pub mod core;
+pub mod errors;
+pub mod metrics;
+pub mod payments;
+mod query;
+pub mod refunds;
+
+pub mod api_event;
+pub mod sdk_events;
+mod sqlx;
+mod types;
+use api_event::metrics::{ApiEventMetric, ApiEventMetricRow};
+pub use types::AnalyticsDomain;
+pub mod lambda_utils;
+pub mod utils;
+
+use std::sync::Arc;
+
+use api_models::analytics::{
+ api_event::{
+ ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
+ },
+ payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier},
+ refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier},
+ sdk_events::{
+ SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier,
+ },
+ Distribution, Granularity, TimeRange,
+};
+use clickhouse::ClickhouseClient;
+pub use clickhouse::ClickhouseConfig;
+use error_stack::IntoReport;
+use router_env::{
+ logger,
+ tracing::{self, instrument},
+};
+use storage_impl::config::Database;
+
+use self::{
+ payments::{
+ distribution::{PaymentDistribution, PaymentDistributionRow},
+ metrics::{PaymentMetric, PaymentMetricRow},
+ },
+ refunds::metrics::{RefundMetric, RefundMetricRow},
+ sdk_events::metrics::{SdkEventMetric, SdkEventMetricRow},
+ sqlx::SqlxClient,
+ types::MetricsError,
+};
+
+#[derive(Clone, Debug)]
+pub enum AnalyticsProvider {
+ Sqlx(SqlxClient),
+ Clickhouse(ClickhouseClient),
+ CombinedCkh(SqlxClient, ClickhouseClient),
+ CombinedSqlx(SqlxClient, ClickhouseClient),
+}
+
+impl Default for AnalyticsProvider {
+ fn default() -> Self {
+ Self::Sqlx(SqlxClient::default())
+ }
+}
+
+impl ToString for AnalyticsProvider {
+ fn to_string(&self) -> String {
+ String::from(match self {
+ Self::Clickhouse(_) => "Clickhouse",
+ Self::Sqlx(_) => "Sqlx",
+ Self::CombinedCkh(_, _) => "CombinedCkh",
+ Self::CombinedSqlx(_, _) => "CombinedSqlx",
+ })
+ }
+}
+
+impl AnalyticsProvider {
+ #[instrument(skip_all)]
+ pub async fn get_payment_metrics(
+ &self,
+ metric: &PaymentMetrics,
+ dimensions: &[PaymentDimensions],
+ merchant_id: &str,
+ filters: &PaymentFilters,
+ granularity: &Option<Granularity>,
+ time_range: &TimeRange,
+ ) -> types::MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
+ // Metrics to get the fetch time for each payment metric
+ metrics::request::record_operation_time(
+ async {
+ match self {
+ Self::Sqlx(pool) => {
+ metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::Clickhouse(pool) => {
+ metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::CombinedCkh(sqlx_pool, ckh_pool) => {
+ let (ckh_result, sqlx_result) = tokio::join!(metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ ),
+ metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ sqlx_pool,
+ ));
+ match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
+ router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics")
+ },
+ _ => {}
+
+ };
+
+ ckh_result
+ }
+ Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
+ let (ckh_result, sqlx_result) = tokio::join!(metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ ),
+ metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ sqlx_pool,
+ ));
+ match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
+ router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics metrics")
+ },
+ _ => {}
+
+ };
+
+ sqlx_result
+ }
+ }
+ },
+ &metrics::METRIC_FETCH_TIME,
+ metric,
+ self,
+ )
+ .await
+ }
+
+ pub async fn get_payment_distribution(
+ &self,
+ distribution: &Distribution,
+ dimensions: &[PaymentDimensions],
+ merchant_id: &str,
+ filters: &PaymentFilters,
+ granularity: &Option<Granularity>,
+ time_range: &TimeRange,
+ ) -> types::MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> {
+ // Metrics to get the fetch time for each payment metric
+ metrics::request::record_operation_time(
+ async {
+ match self {
+ Self::Sqlx(pool) => {
+ distribution.distribution_for
+ .load_distribution(
+ distribution,
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::Clickhouse(pool) => {
+ distribution.distribution_for
+ .load_distribution(
+ distribution,
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::CombinedCkh(sqlx_pool, ckh_pool) => {
+ let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for
+ .load_distribution(
+ distribution,
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ ),
+ distribution.distribution_for
+ .load_distribution(
+ distribution,
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ sqlx_pool,
+ ));
+ match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
+ router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution")
+ },
+ _ => {}
+
+ };
+
+ ckh_result
+ }
+ Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
+ let (ckh_result, sqlx_result) = tokio::join!(distribution.distribution_for
+ .load_distribution(
+ distribution,
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ ),
+ distribution.distribution_for
+ .load_distribution(
+ distribution,
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ sqlx_pool,
+ ));
+ match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
+ router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics distribution")
+ },
+ _ => {}
+
+ };
+
+ sqlx_result
+ }
+ }
+ },
+ &metrics::METRIC_FETCH_TIME,
+ &distribution.distribution_for,
+ self,
+ )
+ .await
+ }
+
+ pub async fn get_refund_metrics(
+ &self,
+ metric: &RefundMetrics,
+ dimensions: &[RefundDimensions],
+ merchant_id: &str,
+ filters: &RefundFilters,
+ granularity: &Option<Granularity>,
+ time_range: &TimeRange,
+ ) -> types::MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
+ // Metrics to get the fetch time for each refund metric
+ metrics::request::record_operation_time(
+ async {
+ match self {
+ Self::Sqlx(pool) => {
+ metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::Clickhouse(pool) => {
+ metric
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::CombinedCkh(sqlx_pool, ckh_pool) => {
+ let (ckh_result, sqlx_result) = tokio::join!(
+ metric.load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ ),
+ metric.load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ sqlx_pool,
+ )
+ );
+ match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
+ logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics")
+ }
+ _ => {}
+ };
+ ckh_result
+ }
+ Self::CombinedSqlx(sqlx_pool, ckh_pool) => {
+ let (ckh_result, sqlx_result) = tokio::join!(
+ metric.load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ ),
+ metric.load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ sqlx_pool,
+ )
+ );
+ match (&sqlx_result, &ckh_result) {
+ (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
+ logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics metrics")
+ }
+ _ => {}
+ };
+ sqlx_result
+ }
+ }
+ },
+ &metrics::METRIC_FETCH_TIME,
+ metric,
+ self,
+ )
+ .await
+ }
+
+ pub async fn get_sdk_event_metrics(
+ &self,
+ metric: &SdkEventMetrics,
+ dimensions: &[SdkEventDimensions],
+ pub_key: &str,
+ filters: &SdkEventFilters,
+ granularity: &Option<Granularity>,
+ time_range: &TimeRange,
+ ) -> types::MetricsResult<Vec<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
+ match self {
+ Self::Sqlx(_pool) => Err(MetricsError::NotImplemented).into_report(),
+ Self::Clickhouse(pool) => {
+ metric
+ .load_metrics(dimensions, pub_key, filters, granularity, time_range, pool)
+ .await
+ }
+ Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => {
+ metric
+ .load_metrics(
+ dimensions,
+ pub_key,
+ filters,
+ granularity,
+ // Since SDK events are ckh only use ckh here
+ time_range,
+ ckh_pool,
+ )
+ .await
+ }
+ }
+ }
+
+ pub async fn get_api_event_metrics(
+ &self,
+ metric: &ApiEventMetrics,
+ dimensions: &[ApiEventDimensions],
+ pub_key: &str,
+ filters: &ApiEventFilters,
+ granularity: &Option<Granularity>,
+ time_range: &TimeRange,
+ ) -> types::MetricsResult<Vec<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
+ match self {
+ Self::Sqlx(_pool) => Err(MetricsError::NotImplemented).into_report(),
+ Self::Clickhouse(ckh_pool)
+ | Self::CombinedCkh(_, ckh_pool)
+ | Self::CombinedSqlx(_, ckh_pool) => {
+ // Since API events are ckh only use ckh here
+ metric
+ .load_metrics(
+ dimensions,
+ pub_key,
+ filters,
+ granularity,
+ time_range,
+ ckh_pool,
+ )
+ .await
+ }
+ }
+ }
+
+ pub async fn from_conf(config: &AnalyticsConfig) -> Self {
+ match config {
+ AnalyticsConfig::Sqlx { sqlx } => Self::Sqlx(SqlxClient::from_conf(sqlx).await),
+ AnalyticsConfig::Clickhouse { clickhouse } => Self::Clickhouse(ClickhouseClient {
+ config: Arc::new(clickhouse.clone()),
+ }),
+ AnalyticsConfig::CombinedCkh { sqlx, clickhouse } => Self::CombinedCkh(
+ SqlxClient::from_conf(sqlx).await,
+ ClickhouseClient {
+ config: Arc::new(clickhouse.clone()),
+ },
+ ),
+ AnalyticsConfig::CombinedSqlx { sqlx, clickhouse } => Self::CombinedSqlx(
+ SqlxClient::from_conf(sqlx).await,
+ ClickhouseClient {
+ config: Arc::new(clickhouse.clone()),
+ },
+ ),
+ }
+ }
+}
+
+#[derive(Clone, Debug, serde::Deserialize)]
+#[serde(tag = "source")]
+#[serde(rename_all = "lowercase")]
+pub enum AnalyticsConfig {
+ Sqlx {
+ sqlx: Database,
+ },
+ Clickhouse {
+ clickhouse: ClickhouseConfig,
+ },
+ CombinedCkh {
+ sqlx: Database,
+ clickhouse: ClickhouseConfig,
+ },
+ CombinedSqlx {
+ sqlx: Database,
+ clickhouse: ClickhouseConfig,
+ },
+}
+
+impl Default for AnalyticsConfig {
+ fn default() -> Self {
+ Self::Sqlx {
+ sqlx: Database::default(),
+ }
+ }
+}
+
+#[derive(Clone, Debug, serde::Deserialize, Default, serde::Serialize)]
+pub struct ReportConfig {
+ pub payment_function: String,
+ pub refund_function: String,
+ pub dispute_function: String,
+ pub region: String,
+}
diff --git a/crates/analytics/src/main.rs b/crates/analytics/src/main.rs
new file mode 100644
index 000000000000..5bf256ea9783
--- /dev/null
+++ b/crates/analytics/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello world");
+}
diff --git a/crates/router/src/analytics/metrics.rs b/crates/analytics/src/metrics.rs
similarity index 100%
rename from crates/router/src/analytics/metrics.rs
rename to crates/analytics/src/metrics.rs
diff --git a/crates/router/src/analytics/metrics/request.rs b/crates/analytics/src/metrics/request.rs
similarity index 51%
rename from crates/router/src/analytics/metrics/request.rs
rename to crates/analytics/src/metrics/request.rs
index b7c202f2db25..3d1a78808f34 100644
--- a/crates/router/src/analytics/metrics/request.rs
+++ b/crates/analytics/src/metrics/request.rs
@@ -6,24 +6,20 @@ pub fn add_attributes<T: Into<router_env::opentelemetry::Value>>(
}
#[inline]
-pub async fn record_operation_time<F, R>(
+pub async fn record_operation_time<F, R, T>(
future: F,
metric: &once_cell::sync::Lazy<router_env::opentelemetry::metrics::Histogram<f64>>,
- metric_name: &api_models::analytics::payments::PaymentMetrics,
- source: &crate::analytics::AnalyticsProvider,
+ metric_name: &T,
+ source: &crate::AnalyticsProvider,
) -> R
where
F: futures::Future