diff --git a/.cargo/config.toml b/.cargo/config.toml
index 3082e9635cf9..5b27955262ad 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -3,9 +3,13 @@ rustflags = [
"-Funsafe_code",
"-Wclippy::as_conversions",
"-Wclippy::expect_used",
+ "-Wclippy::index_refutable_slice",
+ "-Wclippy::indexing_slicing",
+ "-Wclippy::match_on_vec_items",
"-Wclippy::missing_panics_doc",
- "-Wclippy::panic_in_result_fn",
+ "-Wclippy::out_of_bounds_indexing",
"-Wclippy::panic",
+ "-Wclippy::panic_in_result_fn",
"-Wclippy::panicking_unwrap",
"-Wclippy::todo",
"-Wclippy::unimplemented",
@@ -23,10 +27,7 @@ rustflags = [
[build]
-rustdocflags = [
- "--cfg",
- "uuid_unstable"
-]
+rustdocflags = ["--cfg", "uuid_unstable"]
[alias]
gen-pg = "generate --path ../../../../connector-template -n"
diff --git a/.dockerignore b/.dockerignore
index 62804a712fa1..81ef10ad2133 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -261,7 +261,3 @@ result*
# node_modules
node_modules/
-
-**/connector_auth.toml
-**/sample_auth.toml
-**/auth.toml
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 638d5540d3d6..0eb3d95bfc69 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -23,6 +23,17 @@ postman/ @juspay/hyperswitch-framework
Cargo.toml @juspay/hyperswitch-framework
Cargo.lock @juspay/hyperswitch-framework
+crates/api_models/src/events/ @juspay/hyperswitch-analytics
+crates/api_models/src/events.rs @juspay/hyperswitch-analytics
+crates/api_models/src/analytics/ @juspay/hyperswitch-analytics
+crates/api_models/src/analytics.rs @juspay/hyperswitch-analytics
+crates/router/src/analytics.rs @juspay/hyperswitch-analytics
+crates/router/src/events/ @juspay/hyperswitch-analytics
+crates/router/src/events.rs @juspay/hyperswitch-analytics
+crates/common_utils/src/events/ @juspay/hyperswitch-analytics
+crates/common_utils/src/events.rs @juspay/hyperswitch-analytics
+crates/analytics/ @juspay/hyperswitch-analytics
+
connector-template/ @juspay/hyperswitch-connector
crates/router/src/connector/ @juspay/hyperswitch-connector
crates/router/tests/connectors/ @juspay/hyperswitch-connector
@@ -33,6 +44,71 @@ crates/router/src/compatibility/ @juspay/hyperswitch-compatibility
crates/router/src/core/ @juspay/hyperswitch-core
+crates/api_models/src/routing.rs @juspay/hyperswitch-routing
+crates/euclid @juspay/hyperswitch-routing
+crates/euclid_macros @juspay/hyperswitch-routing
+crates/euclid_wasm @juspay/hyperswitch-routing
+crates/kgraph_utils @juspay/hyperswitch-routing
+crates/router/src/routes/routing.rs @juspay/hyperswitch-routing
+crates/router/src/core/routing @juspay/hyperswitch-routing
+crates/router/src/core/routing.rs @juspay/hyperswitch-routing
+crates/router/src/core/payments/routing @juspay/hyperswitch-routing
+crates/router/src/core/payments/routing.rs @juspay/hyperswitch-routing
+
+crates/api_models/src/connector_onboarding.rs @juspay/hyperswitch-dashboard
+crates/api_models/src/user @juspay/hyperswitch-dashboard
+crates/api_models/src/user.rs @juspay/hyperswitch-dashboard
+crates/api_models/src/user_role.rs @juspay/hyperswitch-dashboard
+crates/api_models/src/verify_connector.rs @juspay/hyperswitch-dashboard
+crates/api_models/src/connector_onboarding.rs @juspay/hyperswitch-dashboard
+crates/diesel_models/src/query/dashboard_metadata.rs @juspay/hyperswitch-dashboard
+crates/diesel_models/src/query/user @juspay/hyperswitch-dashboard
+crates/diesel_models/src/query/user_role.rs @juspay/hyperswitch-dashboard
+crates/diesel_models/src/query/user.rs @juspay/hyperswitch-dashboard
+crates/diesel_models/src/user @juspay/hyperswitch-dashboard
+crates/diesel_models/src/user.rs @juspay/hyperswitch-dashboard
+crates/diesel_models/src/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/consts/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/consts/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/core/connector_onboarding @juspay/hyperswitch-dashboard
+crates/router/src/core/connector_onboarding.rs @juspay/hyperswitch-dashboard
+crates/router/src/core/errors/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/core/errors/user @juspay/hyperswitch-dashboard
+crates/router/src/core/user @juspay/hyperswitch-dashboard
+crates/router/src/core/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/core/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/core/verify_connector.rs @juspay/hyperswitch-dashboard
+crates/router/src/db/dashboard_metadata.rs @juspay/hyperswitch-dashboard
+crates/router/src/db/user @juspay/hyperswitch-dashboard
+crates/router/src/db/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/db/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/routes/connector_onboarding.rs @juspay/hyperswitch-dashboard
+crates/router/src/routes/dummy_connector @juspay/hyperswitch-dashboard
+crates/router/src/routes/dummy_connector.rs @juspay/hyperswitch-dashboard
+crates/router/src/routes/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/routes/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/routes/verify_connector.rs @juspay/hyperswitch-dashboard
+crates/router/src/services/authentication.rs @juspay/hyperswitch-dashboard
+crates/router/src/services/authorization @juspay/hyperswitch-dashboard
+crates/router/src/services/authorization.rs @juspay/hyperswitch-dashboard
+crates/router/src/services/jwt.rs @juspay/hyperswitch-dashboard
+crates/router/src/services/email/types.rs @juspay/hyperswitch-dashboard
+crates/router/src/types/api/connector_onboarding @juspay/hyperswitch-dashboard
+crates/router/src/types/api/connector_onboarding.rs @juspay/hyperswitch-dashboard
+crates/router/src/types/api/verify_connector @juspay/hyperswitch-dashboard
+crates/router/src/types/api/verify_connector.rs @juspay/hyperswitch-dashboard
+crates/router/src/types/domain/user @juspay/hyperswitch-dashboard
+crates/router/src/types/domain/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/types/storage/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/types/storage/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/types/storage/dashboard_metadata.rs @juspay/hyperswitch-dashboard
+crates/router/src/utils/connector_onboarding @juspay/hyperswitch-dashboard
+crates/router/src/utils/connector_onboarding.rs @juspay/hyperswitch-dashboard
+crates/router/src/utils/user @juspay/hyperswitch-dashboard
+crates/router/src/utils/user.rs @juspay/hyperswitch-dashboard
+crates/router/src/utils/user_role.rs @juspay/hyperswitch-dashboard
+crates/router/src/utils/verify_connector.rs @juspay/hyperswitch-dashboard
+
crates/router/src/scheduler/ @juspay/hyperswitch-process-tracker
Dockerfile @juspay/hyperswitch-infra
diff --git a/.github/git-cliff-changelog.toml b/.github/git-cliff-changelog.toml
index 1d7e4080cc20..f9959eebfc69 100644
--- a/.github/git-cliff-changelog.toml
+++ b/.github/git-cliff-changelog.toml
@@ -14,7 +14,7 @@ body = """
{% set commit_base_url = "https://github.com/juspay/hyperswitch/commit/" -%}
{% set compare_base_url = "https://github.com/juspay/hyperswitch/compare/" -%}
{% if version -%}
- ## {{ version | trim_start_matches(pat="v") }} ({{ timestamp | date(format="%Y-%m-%d") }})
+ ## {{ version }}
{% else -%}
## [unreleased]
{% endif -%}
@@ -69,7 +69,8 @@ commit_parsers = [
{ message = "^(?i)(refactor)", group = "Refactors" },
{ message = "^(?i)(test)", group = "Testing" },
{ message = "^(?i)(docs)", group = "Documentation" },
- { message = "^(?i)(chore\\(version\\)): V[\\d]+\\.[\\d]+\\.[\\d]+", skip = true },
+ { message = "^(?i)(chore\\(version\\)): (V|v)[\\d]+\\.[\\d]+\\.[\\d]+", skip = true },
+ { message = "^(?i)(chore\\(version\\)): [0-9]{4}\\.[0-9]{2}\\.[0-9]{2}(\\.[0-9]+)?(-.+)?", skip = true },
{ message = "^(?i)(chore)", group = "Miscellaneous Tasks" },
{ message = "^(?i)(build)", group = "Build System / Dependencies" },
{ message = "^(?i)(ci)", skip = true },
@@ -79,7 +80,7 @@ protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# glob pattern for matching git tags
-tag_pattern = "v[0-9]*"
+tag_pattern = "[0-9]{4}\\.[0-9]{2}\\.[0-9]{2}(\\.[0-9]+)?(-.+)?"
# regex for skipping tags
# skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
diff --git a/.github/git-cliff-release.toml b/.github/git-cliff-release.toml
deleted file mode 100644
index 1b82c812b5d8..000000000000
--- a/.github/git-cliff-release.toml
+++ /dev/null
@@ -1,89 +0,0 @@
-# configuration file for git-cliff
-# see https://github.com/orhun/git-cliff#configuration-file
-
-[changelog]
-# changelog header
-header = ""
-# template for the changelog body
-# https://tera.netlify.app/docs/#introduction
-body = """
-{% set newline = "\n" -%}
-{% set commit_base_url = "https://github.com/juspay/hyperswitch/commit/" -%}
-{% set compare_base_url = "https://github.com/juspay/hyperswitch/compare/" -%}
-{% if version -%}
- ## {{ version | trim_start_matches(pat="v") }} ({{ timestamp | date(format="%Y-%m-%d") }})
-{% else -%}
- ## [unreleased]
-{% endif -%}
-{% for group, commits in commits | group_by(attribute="group") %}
- {# The `striptags` removes the HTML comments added while grouping -#}
- ### {{ group | striptags | trim | upper_first }}
- {% for scope, commits in commits | group_by(attribute="scope") %}
- - {{ "**" ~ scope ~ ":" ~ "**" -}}
- {% for commit in commits -%}
- {% if commits | length != 1 %}{{ newline ~ " - " }}{% else %}{{ " " }}{% endif -%}
- {{ commit.message | upper_first | trim }} ([`{{ commit.id | truncate(length=7, end="") }}`]({{ commit_base_url ~ commit.id }})) by {{ commit.author.email -}}
- {%- endfor -%}
- {%- endfor -%}
- {%- for commit in commits -%}
- {% if commit.scope %}{% else %}
- - {{ commit.message | upper_first | trim }} ([`{{ commit.id | truncate(length=7, end="") }}`]({{ commit_base_url ~ commit.id }})) by {{ commit.author.email -}}
- {%- endif %}
- {%- endfor %}
-{% endfor %}
-{% if previous and previous.commit_id and commit_id -%}
- **Full Changelog:** [`{{ previous.version }}...{{ version }}`]({{ compare_base_url }}{{ previous.version }}...{{ version }})\n
-{% endif %}
-"""
-# remove the leading and trailing whitespace from the template
-trim = true
-# changelog footer
-footer = ""
-
-[git]
-# parse the commits based on https://www.conventionalcommits.org
-conventional_commits = true
-# filter out the commits that are not conventional
-filter_unconventional = false
-# process each line of a commit as an individual commit
-split_commits = false
-# regex for preprocessing the commit messages
-commit_preprocessors = [
- { pattern = "^ +", replace = "" }, # remove spaces at the beginning of the message
- { pattern = " +", replace = " " }, # replace multiple spaces with a single space
- { pattern = "\\(#([0-9]+)\\)", replace = "([#${1}](https://github.com/juspay/hyperswitch/pull/${1}))" }, # replace PR numbers with links
- { pattern = "(\\n?Co-authored-by: .+ <.+@.+>\\n?)+", replace = "" }, # remove co-author information
- { pattern = "(\\n?Signed-off-by: .+ <.+@.+>\\n?)+", replace = "" }, # remove sign-off information
-]
-# regex for parsing and grouping commits
-# the HTML comments (``) are a workaround to get sections in custom order, since `git-cliff` sorts sections in alphabetical order
-# reference: https://github.com/orhun/git-cliff/issues/9
-commit_parsers = [
- { message = "^(?i)(feat)", group = "Features" },
- { message = "^(?i)(fix)", group = "Bug Fixes" },
- { message = "^(?i)(perf)", group = "Performance" },
- { body = ".*security", group = "Security" },
- { message = "^(?i)(refactor)", group = "Refactors" },
- { message = "^(?i)(test)", group = "Testing" },
- { message = "^(?i)(docs)", group = "Documentation" },
- { message = "^(?i)(chore\\(version\\)): V[\\d]+\\.[\\d]+\\.[\\d]+", skip = true },
- { message = "^(?i)(chore)", group = "Miscellaneous Tasks" },
- { message = "^(?i)(build)", group = "Build System / Dependencies" },
- { message = "^(?i)(ci)", skip = true },
-]
-# protect breaking changes from being skipped due to matching a skipping commit_parser
-protect_breaking_commits = false
-# filter out the commits that are not matched by commit parsers
-filter_commits = false
-# glob pattern for matching git tags
-tag_pattern = "v[0-9]*"
-# regex for skipping tags
-# skip_tags = "v0.1.0-beta.1"
-# regex for ignoring tags
-# ignore_tags = ""
-# sort the tags topologically
-topo_order = true
-# sort the commits inside sections by oldest/newest order
-sort_commits = "oldest"
-# limit the number of commits included in the changelog.
-# limit_commits = 42
diff --git a/.github/secrets/connector_auth.toml.gpg b/.github/secrets/connector_auth.toml.gpg
deleted file mode 100644
index 7da9189ade58..000000000000
Binary files a/.github/secrets/connector_auth.toml.gpg and /dev/null differ
diff --git a/.github/workflows/CI-pr.yml b/.github/workflows/CI-pr.yml
index c79ffa63709a..d6b3d98b8c82 100644
--- a/.github/workflows/CI-pr.yml
+++ b/.github/workflows/CI-pr.yml
@@ -41,17 +41,25 @@ jobs:
name: Check formatting
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository with token
if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.ref }}
- token: ${{ secrets.AUTO_FILE_UPDATE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Checkout repository for fork
if: ${{ github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -71,8 +79,8 @@ jobs:
cargo +nightly fmt --all
if ! git diff --exit-code --quiet -- crates; then
echo "::notice::Formatting check failed"
- git config --local user.name 'github-actions[bot]'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
git add crates
git commit --message 'chore: run formatter'
git push
@@ -91,7 +99,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: "Fetch base branch"
shell: bash
@@ -108,12 +116,12 @@ jobs:
with:
toolchain: 1.65
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
version: 0.6.5
@@ -122,149 +130,53 @@ jobs:
shell: bash
run: sed -i 's/rustflags = \[/rustflags = \[\n "-Dwarnings",/' .cargo/config.toml
- - name: Check files changed
+ - name: Check modified crates
shell: bash
run: |
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/api_models/; then
- echo "api_models_changes_exist=false" >> $GITHUB_ENV
- else
- echo "api_models_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/cards/; then
- echo "cards_changes_exist=false" >> $GITHUB_ENV
- else
- echo "cards_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/common_enums/; then
- echo "common_enums_changes_exist=false" >> $GITHUB_ENV
- else
- echo "common_enums_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/common_utils/; then
- echo "common_utils_changes_exist=false" >> $GITHUB_ENV
- else
- echo "common_utils_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/diesel_models/; then
- echo "diesel_models_changes_exist=false" >> $GITHUB_ENV
- else
- echo "diesel_models_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/drainer/; then
- echo "drainer_changes_exist=false" >> $GITHUB_ENV
- else
- echo "drainer_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/external_services/; then
- echo "external_services_changes_exist=false" >> $GITHUB_ENV
- else
- echo "external_services_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/masking/; then
- echo "masking_changes_exist=false" >> $GITHUB_ENV
- else
- echo "masking_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/redis_interface/; then
- echo "redis_interface_changes_exist=false" >> $GITHUB_ENV
- else
- echo "redis_interface_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/router/; then
- echo "router_changes_exist=false" >> $GITHUB_ENV
- else
- echo "router_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/storage_impl/; then
- echo "storage_impl_changes_exist=false" >> $GITHUB_ENV
- else
- echo "storage_impl_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/router_derive/; then
- echo "router_derive_changes_exist=false" >> $GITHUB_ENV
- else
- echo "router_derive_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/router_env/; then
- echo "router_env_changes_exist=false" >> $GITHUB_ENV
- else
- echo "router_env_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/test_utils/; then
- echo "test_utils_changes_exist=false" >> $GITHUB_ENV
- else
- echo "test_utils_changes_exist=true" >> $GITHUB_ENV
- fi
-
- - name: Cargo hack api_models
- if: env.api_models_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p api_models
-
- - name: Cargo hack cards
- if: env.cards_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p cards
-
- - name: Cargo hack common_enums
- if: env.common_enums_changes_exist == 'true'
+ # Obtain a list of workspace members
+ workspace_members="$(
+ cargo metadata --format-version 1 --no-deps \
+ | jq --compact-output --monochrome-output --raw-output '.workspace_members | sort | .[] | split(" ")[0]'
+ )"
+
+ PACKAGES_CHECKED=()
+ PACKAGES_SKIPPED=()
+
+ while IFS= read -r package_name; do
+ # Obtain comma-separated list of transitive workspace dependencies for each workspace member
+ change_paths="$(cargo tree --all-features --no-dedupe --prefix none --package "${package_name}" \
+ | grep 'crates/' \
+ | sort --unique \
+ | awk --field-separator ' ' '{ printf "crates/%s\n", $1 }' | paste -d ',' -s -)"
+
+ # Store change paths in an array by splitting `change_paths` by comma
+ IFS=',' read -ra change_paths <<< "${change_paths}"
+
+ # A package must be checked if any of its transitive dependencies (or itself) has been modified
+ if git diff --exit-code --quiet "origin/${GITHUB_BASE_REF}" -- "${change_paths[@]}"; then
+ printf '::debug::Skipping `%s` since none of these paths were modified: %s\n' "${package_name}" "${change_paths[*]}"
+ PACKAGES_SKIPPED+=("${package_name}")
+ else
+ printf '::debug::Checking `%s` since at least one of these paths was modified: %s\n' "${package_name}" "${change_paths[*]}"
+ PACKAGES_CHECKED+=("${package_name}")
+ fi
+ done <<< "${workspace_members}"
+
+ printf '::notice::Packages checked: %s; Packages skipped: %s\n' "${PACKAGES_CHECKED[*]}" "${PACKAGES_SKIPPED[*]}"
+ echo "PACKAGES_CHECKED=${PACKAGES_CHECKED[*]}" >> ${GITHUB_ENV}
+ echo "PACKAGES_SKIPPED=${PACKAGES_SKIPPED[*]}" >> ${GITHUB_ENV}
+
+ - name: Run `cargo hack` on modified crates
shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p common_enums
-
- - name: Cargo hack common_utils
- if: env.common_utils_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p common_utils
-
- - name: Cargo hack diesel_models
- if: env.diesel_models_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p diesel_models
-
- - name: Cargo hack drainer
- if: env.drainer_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p drainer
-
- - name: Cargo hack external_services
- if: env.external_services_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p external_services
-
- - name: Cargo hack masking
- if: env.masking_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p masking
-
- - name: Cargo hack redis_interface
- if: env.redis_interface_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p redis_interface
-
- - name: Cargo hack router
- if: env.router_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --skip kms,basilisk,kv_store,accounts_cache,openapi --no-dev-deps -p router
-
- - name: Cargo hack storage_impl
- if: env.storage_impl_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p storage_impl
-
- - name: Cargo hack router_derive
- if: env.router_derive_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p router_derive
-
- - name: Cargo hack router_env
- if: env.router_env_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p router_env
+ run: |
+ # Store packages to check in an array by splitting `PACKAGES_CHECKED` by space
+ IFS=' ' read -ra PACKAGES_CHECKED <<< "${PACKAGES_CHECKED}"
- - name: Cargo hack test_utils
- if: env.test_utils_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p test_utils
+ for package in "${PACKAGES_CHECKED[@]}"; do
+ printf '::group::Running `cargo hack` on package `%s`\n' "${package}"
+ cargo hack check --each-feature --all-targets --package "${package}"
+ echo '::endgroup::'
+ done
# cargo-deny:
# name: Run cargo-deny
@@ -280,7 +192,7 @@ jobs:
# steps:
# - name: Checkout repository
- # uses: actions/checkout@v3
+ # uses: actions/checkout@v4
# - name: Run cargo-deny
# uses: EmbarkStudios/cargo-deny-action@v1.3.2
@@ -299,17 +211,25 @@ jobs:
# - windows-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository for fork
if: ${{ (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Checkout repository with token
if: ${{ (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.ref }}
- token: ${{ secrets.AUTO_FILE_UPDATE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: "Fetch base branch"
shell: bash
@@ -328,16 +248,16 @@ jobs:
components: clippy
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
# - name: Install cargo-nextest
- # uses: baptiste0928/cargo-install@v2.1.0
+ # uses: baptiste0928/cargo-install@v2.2.0
# with:
# crate: cargo-nextest
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
@@ -360,163 +280,67 @@ jobs:
shell: bash
run: |
if ! git diff --quiet --exit-code -- Cargo.lock ; then
- git config --local user.name 'github-actions[bot]'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
git add Cargo.lock
git commit --message 'chore: update Cargo.lock'
git push
fi
- - name: Check files changed
+ - name: Check modified crates
shell: bash
run: |
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/api_models/; then
- echo "api_models_changes_exist=false" >> $GITHUB_ENV
- else
- echo "api_models_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/cards/; then
- echo "cards_changes_exist=false" >> $GITHUB_ENV
- else
- echo "cards_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/common_enums/; then
- echo "common_enums_changes_exist=false" >> $GITHUB_ENV
- else
- echo "common_enums_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/common_utils/; then
- echo "common_utils_changes_exist=false" >> $GITHUB_ENV
- else
- echo "common_utils_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/diesel_models/; then
- echo "diesel_models_changes_exist=false" >> $GITHUB_ENV
- else
- echo "diesel_models_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/drainer/; then
- echo "drainer_changes_exist=false" >> $GITHUB_ENV
- else
- echo "drainer_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/external_services/; then
- echo "external_services_changes_exist=false" >> $GITHUB_ENV
- else
- echo "external_services_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/masking/; then
- echo "masking_changes_exist=false" >> $GITHUB_ENV
- else
- echo "masking_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/redis_interface/; then
- echo "redis_interface_changes_exist=false" >> $GITHUB_ENV
- else
- echo "redis_interface_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/router/; then
- echo "router_changes_exist=false" >> $GITHUB_ENV
- else
- echo "router_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/router_derive/; then
- echo "router_derive_changes_exist=false" >> $GITHUB_ENV
- else
- echo "router_derive_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/storage_impl/; then
- echo "storage_impl_changes_exist=false" >> $GITHUB_ENV
- else
- echo "storage_impl_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/router_env/; then
- echo "router_env_changes_exist=false" >> $GITHUB_ENV
- else
- echo "router_env_changes_exist=true" >> $GITHUB_ENV
- fi
- if git diff --exit-code --quiet origin/$GITHUB_BASE_REF -- crates/test_utils/; then
- echo "test_utils_changes_exist=false" >> $GITHUB_ENV
- else
- echo "test_utils_changes_exist=true" >> $GITHUB_ENV
- fi
-
- - name: Cargo hack api_models
- if: env.api_models_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p api_models
-
- - name: Cargo hack cards
- if: env.cards_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p cards
-
- - name: Cargo hack common_enums
- if: env.common_enums_changes_exist == 'true'
+ # Obtain a list of workspace members
+ workspace_members="$(
+ cargo metadata --format-version 1 --no-deps \
+ | jq --compact-output --monochrome-output --raw-output '.workspace_members | sort | .[] | split(" ")[0]'
+ )"
+
+ PACKAGES_CHECKED=()
+ PACKAGES_SKIPPED=()
+
+ while IFS= read -r package_name; do
+ # Obtain comma-separated list of transitive workspace dependencies for each workspace member
+ change_paths="$(cargo tree --all-features --no-dedupe --prefix none --package "${package_name}" \
+ | grep 'crates/' \
+ | sort --unique \
+ | awk --field-separator ' ' '{ printf "crates/%s\n", $1 }' | paste -d ',' -s -)"
+
+ # Store change paths in an array by splitting `change_paths` by comma
+ IFS=',' read -ra change_paths <<< "${change_paths}"
+
+ # A package must be checked if any of its transitive dependencies (or itself) has been modified
+ if git diff --exit-code --quiet "origin/${GITHUB_BASE_REF}" -- "${change_paths[@]}"; then
+ printf '::debug::Skipping `%s` since none of these paths were modified: %s\n' "${package_name}" "${change_paths[*]}"
+ PACKAGES_SKIPPED+=("${package_name}")
+ else
+ printf '::debug::Checking `%s` since at least one of these paths was modified: %s\n' "${package_name}" "${change_paths[*]}"
+ PACKAGES_CHECKED+=("${package_name}")
+ fi
+ done <<< "${workspace_members}"
+
+ printf '::notice::Packages checked: %s; Packages skipped: %s\n' "${PACKAGES_CHECKED[*]}" "${PACKAGES_SKIPPED[*]}"
+ echo "PACKAGES_CHECKED=${PACKAGES_CHECKED[*]}" >> ${GITHUB_ENV}
+ echo "PACKAGES_SKIPPED=${PACKAGES_SKIPPED[*]}" >> ${GITHUB_ENV}
+
+ - name: Run `cargo hack` on modified crates
shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p common_enums
-
- - name: Cargo hack common_utils
- if: env.common_utils_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p common_utils
-
- - name: Cargo hack diesel_models
- if: env.diesel_models_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p diesel_models
-
- - name: Cargo hack drainer
- if: env.drainer_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p drainer
-
- - name: Cargo hack external_services
- if: env.external_services_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p external_services
-
- - name: Cargo hack masking
- if: env.masking_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p masking
-
- - name: Cargo hack redis_interface
- if: env.redis_interface_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p redis_interface
-
- - name: Cargo hack router
- if: env.router_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --skip kms,basilisk,kv_store,accounts_cache,openapi --no-dev-deps -p router
-
- - name: Cargo hack router_derive
- if: env.router_derive_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p router_derive
-
- - name: Cargo hack storage_impl
- if: env.storage_impl_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p storage_impl
-
- - name: Cargo hack router_env
- if: env.router_env_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p router_env
+ run: |
+ # Store packages to check in an array by splitting `PACKAGES_CHECKED` by space
+ IFS=' ' read -ra PACKAGES_CHECKED <<< "${PACKAGES_CHECKED}"
- - name: Cargo hack test_utils
- if: env.test_utils_changes_exist == 'true'
- shell: bash
- run: cargo hack check --each-feature --no-dev-deps -p test_utils
+ for package in "${PACKAGES_CHECKED[@]}"; do
+ printf '::group::Running `cargo hack` on package `%s`\n' "${package}"
+ cargo hack check --each-feature --all-targets --package "${package}"
+ echo '::endgroup::'
+ done
typos:
name: Spell check
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Spell check
uses: crate-ci/typos@master
diff --git a/.github/workflows/CI-push.yml b/.github/workflows/CI-push.yml
index edc9317e526d..90b301bbd9e5 100644
--- a/.github/workflows/CI-push.yml
+++ b/.github/workflows/CI-push.yml
@@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -50,7 +50,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install mold linker
uses: rui314/setup-mold@v1
@@ -63,12 +63,12 @@ jobs:
with:
toolchain: 1.65
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
version: 0.6.5
@@ -80,8 +80,8 @@ jobs:
- name: Cargo hack
if: ${{ github.event_name == 'push' }}
shell: bash
- run: cargo hack check --each-feature --no-dev-deps
-
+ run: cargo hack check --workspace --each-feature --all-targets
+
- name: Cargo build release
if: ${{ github.event_name == 'merge_group' }}
shell: bash
@@ -101,7 +101,7 @@ jobs:
# steps:
# - name: Checkout repository
- # uses: actions/checkout@v3
+ # uses: actions/checkout@v4
# - name: Run cargo-deny
# uses: EmbarkStudios/cargo-deny-action@v1.3.2
@@ -121,7 +121,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install mold linker
uses: rui314/setup-mold@v1
@@ -136,16 +136,16 @@ jobs:
components: clippy
- name: Install cargo-hack
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: cargo-hack
# - name: Install cargo-nextest
- # uses: baptiste0928/cargo-install@v2.1.0
+ # uses: baptiste0928/cargo-install@v2.2.0
# with:
# crate: cargo-nextest
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
with:
save-if: ${{ github.event_name == 'push' }}
@@ -166,7 +166,7 @@ jobs:
- name: Cargo hack
if: ${{ github.event_name == 'push' }}
shell: bash
- run: cargo hack check --each-feature --no-dev-deps
+ run: cargo hack check --workspace --each-feature --all-targets
- name: Cargo build release
if: ${{ github.event_name == 'merge_group' }}
@@ -178,7 +178,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Spell check
uses: crate-ci/typos@master
diff --git a/.github/workflows/auto-release-tag.yml b/.github/workflows/auto-release-tag.yml
index 5334c914cda5..4555b68764c1 100644
--- a/.github/workflows/auto-release-tag.yml
+++ b/.github/workflows/auto-release-tag.yml
@@ -10,18 +10,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PASSWD }}
- name: Build and push router Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=router
@@ -30,7 +30,7 @@ jobs:
tags: juspaydotin/orca:${{ github.ref_name }}, juspaydotin/hyperswitch-router:${{ github.ref_name }}
- name: Build and push consumer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=scheduler
@@ -40,7 +40,7 @@ jobs:
tags: juspaydotin/orca-consumer:${{ github.ref_name }}, juspaydotin/hyperswitch-consumer:${{ github.ref_name }}
- name: Build and push producer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=scheduler
@@ -50,7 +50,7 @@ jobs:
tags: juspaydotin/orca-producer:${{ github.ref_name }}, juspaydotin/hyperswitch-producer:${{ github.ref_name }}
- name: Build and push drainer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
BINARY=drainer
diff --git a/.github/workflows/connector-sanity-tests.yml b/.github/workflows/connector-sanity-tests.yml
index 40a3c3612503..48e6a946a450 100644
--- a/.github/workflows/connector-sanity-tests.yml
+++ b/.github/workflows/connector-sanity-tests.yml
@@ -79,14 +79,14 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: stable 2 weeks ago
- - uses: Swatinem/rust-cache@v2.4.0
+ - uses: Swatinem/rust-cache@v2.7.0
- name: Decrypt connector auth file
env:
diff --git a/.github/workflows/connector-ui-sanity-tests.yml b/.github/workflows/connector-ui-sanity-tests.yml
index 5db45f2962a5..f3d4635ab11d 100644
--- a/.github/workflows/connector-ui-sanity-tests.yml
+++ b/.github/workflows/connector-ui-sanity-tests.yml
@@ -82,24 +82,33 @@ jobs:
- name: Checkout repository
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- - name: Decrypt connector auth file
+ - name: Download Encrypted TOML from S3 and Decrypt
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.CONNECTOR_CREDS_AWS_ACCESS_KEY_ID }}
+ AWS_REGION: ${{ secrets.CONNECTOR_CREDS_AWS_REGION }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.CONNECTOR_CREDS_AWS_SECRET_ACCESS_KEY }}
CONNECTOR_AUTH_PASSPHRASE: ${{ secrets.CONNECTOR_AUTH_PASSPHRASE }}
+ CONNECTOR_CREDS_S3_BUCKET_URI: ${{ secrets.CONNECTOR_CREDS_S3_BUCKET_URI }}
+ DESTINATION_FILE_NAME: "connector_auth.toml.gpg"
+ S3_SOURCE_FILE_NAME: "cf05a6ab-525e-4888-98b3-3b4a443b87c0.toml.gpg"
shell: bash
- run: ./scripts/decrypt_connector_auth.sh
+ run: |
+ mkdir -p "${HOME}/target/secrets" "${HOME}/target/test"
+ aws s3 cp "${CONNECTOR_CREDS_S3_BUCKET_URI}/${S3_SOURCE_FILE_NAME}" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
+ gpg --quiet --batch --yes --decrypt --passphrase="${CONNECTOR_AUTH_PASSPHRASE}" --output "${HOME}/target/test/connector_auth.toml" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
- name: Set connector auth file path in env
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
shell: bash
- run: echo "CONNECTOR_AUTH_FILE_PATH=$HOME/target/test/connector_auth.toml" >> $GITHUB_ENV
+ run: echo "CONNECTOR_AUTH_FILE_PATH=${HOME}/target/test/connector_auth.toml" >> $GITHUB_ENV
- name: Set connector tests file path in env
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
shell: bash
- run: echo "CONNECTOR_TESTS_FILE_PATH=$HOME/target/test/connector_tests.json" >> $GITHUB_ENV
+ run: echo "CONNECTOR_TESTS_FILE_PATH=${HOME}/target/test/connector_tests.json" >> $GITHUB_ENV
- name: Set ignore_browser_profile usage in env
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
@@ -113,10 +122,10 @@ jobs:
toolchain: stable
- name: Build and Cache Rust Dependencies
- uses: Swatinem/rust-cache@v2.4.0
+ uses: Swatinem/rust-cache@v2.7.0
- name: Install Diesel CLI with Postgres Support
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
with:
crate: diesel_cli
@@ -154,9 +163,9 @@ jobs:
failed_connectors=()
for i in $(echo "$INPUT" | tr "," "\n"); do
- echo $i
+ echo "${i}"
if ! cargo test --package test_utils --test connectors -- "${i}_ui::" --test-threads=1; then
- failed_connectors+=("$i")
+ failed_connectors+=("${i}")
fi
done
diff --git a/.github/workflows/conventional-commit-check.yml b/.github/workflows/conventional-commit-check.yml
deleted file mode 100644
index 5fd25e9332d1..000000000000
--- a/.github/workflows/conventional-commit-check.yml
+++ /dev/null
@@ -1,86 +0,0 @@
-name: Conventional Commit Message Check
-
-on:
- # This is a dangerous event trigger as it causes the workflow to run in the
- # context of the target repository.
- # Avoid checking out the head of the pull request or building code from the
- # pull request whenever this trigger is used.
- # Since we only label pull requests, do not have a checkout step in this
- # workflow, and restrict permissions on the token, this is an acceptable
- # use of this trigger.
- pull_request_target:
- types:
- - opened
- - edited
- - reopened
- - ready_for_review
- - synchronize
-
- merge_group:
- types:
- - checks_requested
-
-permissions:
- # Reference: https://github.com/cli/cli/issues/6274
- repository-projects: read
- pull-requests: write
-
-env:
- # Allow more retries for network requests in cargo (downloading crates) and
- # rustup (installing toolchains). This should help to reduce flaky CI failures
- # from transient network timeouts or other issues.
- CARGO_NET_RETRY: 10
- RUSTUP_MAX_RETRIES: 10
- # Use cargo's sparse index protocol
- CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
-
-jobs:
- pr_title_check:
- name: Verify PR title follows conventional commit standards
- runs-on: ubuntu-latest
-
- steps:
- - name: Install Rust
- uses: dtolnay/rust-toolchain@master
- with:
- toolchain: stable 2 weeks ago
-
- - uses: baptiste0928/cargo-install@v2.1.0
- with:
- crate: cocogitto
-
- - name: Verify PR title follows conventional commit standards
- id: pr_title_check
- if: ${{ github.event_name == 'pull_request_target' }}
- shell: bash
- env:
- TITLE: ${{ github.event.pull_request.title }}
- continue-on-error: true
- run: cog verify "$TITLE"
-
- - name: Verify commit message follows conventional commit standards
- id: commit_message_check
- if: ${{ github.event_name == 'merge_group' }}
- shell: bash
- # Fail on error, we don't have context about PR information to update labels
- continue-on-error: false
- run: cog verify '${{ github.event.merge_group.head_commit.message }}'
-
- # GitHub CLI returns a successful error code even if the PR has the label already attached
- - name: Attach 'S-conventions-not-followed' label if PR title check failed
- if: ${{ github.event_name == 'pull_request_target' && steps.pr_title_check.outcome == 'failure' }}
- shell: bash
- env:
- GH_TOKEN: ${{ github.token }}
- run: |
- gh --repo ${{ github.event.repository.full_name }} pr edit --add-label 'S-conventions-not-followed' ${{ github.event.pull_request.number }}
- echo "::error::PR title does not follow conventional commit standards"
- exit 1
-
- # GitHub CLI returns a successful error code even if the PR does not have the label attached
- - name: Remove 'S-conventions-not-followed' label if PR title check succeeded
- if: ${{ github.event_name == 'pull_request_target' && steps.pr_title_check.outcome == 'success' }}
- shell: bash
- env:
- GH_TOKEN: ${{ github.token }}
- run: gh --repo ${{ github.event.repository.full_name }} pr edit --remove-label 'S-conventions-not-followed' ${{ github.event.pull_request.number }}
diff --git a/.github/workflows/create-hotfix-branch.yml b/.github/workflows/create-hotfix-branch.yml
index 77a8bad6bc66..d7afb388f2f8 100644
--- a/.github/workflows/create-hotfix-branch.yml
+++ b/.github/workflows/create-hotfix-branch.yml
@@ -8,26 +8,34 @@ jobs:
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- token: ${{ secrets.AUTO_RELEASE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Check if the input is valid tag
shell: bash
run: |
- if [[ ${{github.ref}} =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
- echo "::notice::${{github.ref}} is a valid tag."
+ if [[ ${{github.ref}} =~ ^refs/tags/[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]+$ ]]; then
+ echo "::notice::${{github.ref}} is a CalVer tag."
else
- echo "::error::${{github.ref}} is not a valid tag."
+ echo "::error::${{github.ref}} is not a CalVer tag."
exit 1
fi
- name: Create hotfix branch
shell: bash
run: |
- HOTFIX_BRANCH="hotfix-${GITHUB_REF#refs/tags/v}"
+ HOTFIX_BRANCH="hotfix-${GITHUB_REF#refs/tags/}"
if git switch --create "$HOTFIX_BRANCH"; then
git push origin "$HOTFIX_BRANCH"
diff --git a/.github/workflows/create-hotfix-tag.yml b/.github/workflows/create-hotfix-tag.yml
index 45699bda24dc..2250ce7ece59 100644
--- a/.github/workflows/create-hotfix-tag.yml
+++ b/.github/workflows/create-hotfix-tag.yml
@@ -8,14 +8,22 @@ jobs:
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- token: ${{ secrets.AUTO_RELEASE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Install git-cliff
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: git-cliff
version: 1.2.0
@@ -23,10 +31,10 @@ jobs:
- name: Check if the input is valid hotfix branch
shell: bash
run: |
- if [[ ${{github.ref}} =~ ^refs/heads/hotfix-[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
- echo "::notice::${{github.ref}} is a valid branch."
+ if [[ ${{github.ref}} =~ ^refs/heads/hotfix-[0-9]{4}\.[0-9]{2}\.[0-9]{2}\.[0-9]+$ ]]; then
+ echo "::notice::${{github.ref}} is a valid hotfix branch."
else
- echo "::error::${{github.ref}} is not a valid branch."
+ echo "::error::${{github.ref}} is not a valid hotfix branch."
exit 1
fi
@@ -48,11 +56,11 @@ jobs:
local previous_hotfix_number
local next_tag
- previous_hotfix_number="$(echo "${previous_tag}" | awk -F. '{ print $4 }')"
+ previous_hotfix_number="$(echo "${previous_tag}" | awk -F. '{ print $4 }' | sed -E 's/([0-9]+)(-hotfix([0-9]+))?/\3/')"
if [[ -z "${previous_hotfix_number}" ]]; then
# Previous tag was not a hotfix tag
- next_tag="${previous_tag}+hotfix.1"
+ next_tag="${previous_tag}-hotfix1"
else
# Previous tag was a hotfix tag, increment hotfix number
local hotfix_number=$((previous_hotfix_number + 1))
@@ -62,7 +70,13 @@ jobs:
echo "${next_tag}"
}
- PREVIOUS_TAG="$(git tag --merged | sort --version-sort | tail --lines 1)"
+ # Search for date-like tags (no strict checking), sort and obtain previous tag
+ PREVIOUS_TAG="$(
+ git tag --merged \
+ | grep --extended-regexp '[0-9]{4}\.[0-9]{2}\.[0-9]{2}' \
+ | sort --version-sort \
+ | tail --lines 1
+ )"
NEXT_TAG="$(get_next_tag "${PREVIOUS_TAG}")"
echo "PREVIOUS_TAG=${PREVIOUS_TAG}" >> $GITHUB_ENV
@@ -86,8 +100,8 @@ jobs:
- name: Set Git Configuration
shell: bash
run: |
- git config --local user.name 'github-actions'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
- name: Push created commit and tag
shell: bash
diff --git a/.github/workflows/hotfix-pr-check.yml b/.github/workflows/hotfix-pr-check.yml
index 59e0bbee3cb4..e178ba31c1e8 100644
--- a/.github/workflows/hotfix-pr-check.yml
+++ b/.github/workflows/hotfix-pr-check.yml
@@ -15,12 +15,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Get hotfix pull request body
shell: bash
- run: |
- echo '${{ github.event.pull_request.body }}' > hotfix_pr_body.txt
+ env:
+ PR_BODY: ${{ github.event.pull_request.body }}
+ run: echo "$PR_BODY" > hotfix_pr_body.txt
- name: Get a list of all original PR numbers
shell: bash
diff --git a/.github/workflows/manual-release.yml b/.github/workflows/manual-release.yml
index 0b70631e113d..9ae80047a669 100644
--- a/.github/workflows/manual-release.yml
+++ b/.github/workflows/manual-release.yml
@@ -17,18 +17,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PASSWD }}
- name: Build and push router Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
@@ -39,7 +39,7 @@ jobs:
tags: juspaydotin/orca:${{ github.sha }}
- name: Build and push consumer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
@@ -50,7 +50,7 @@ jobs:
tags: juspaydotin/orca-consumer:${{ github.sha }}
- name: Build and push producer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
@@ -61,7 +61,7 @@ jobs:
tags: juspaydotin/orca-producer:${{ github.sha }}
- name: Build and push drainer Docker image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
build-args: |
RUN_ENV=${{ inputs.environment }}
diff --git a/.github/workflows/migration-check.yaml b/.github/workflows/migration-check.yaml
index 0c4baaa96193..b740bd3a5b77 100644
--- a/.github/workflows/migration-check.yaml
+++ b/.github/workflows/migration-check.yaml
@@ -40,14 +40,14 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: stable 2 weeks ago
- - uses: baptiste0928/cargo-install@v2.1.0
+ - uses: baptiste0928/cargo-install@v2.2.0
with:
crate: diesel_cli
features: postgres
diff --git a/.github/workflows/postman-collection-runner.yml b/.github/workflows/postman-collection-runner.yml
index 3291755b56cf..8cbbed8187c2 100644
--- a/.github/workflows/postman-collection-runner.yml
+++ b/.github/workflows/postman-collection-runner.yml
@@ -50,29 +50,39 @@ jobs:
steps:
- name: Repository checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- - name: Decrypt connector auth file
+ - name: Download Encrypted TOML from S3 and Decrypt
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.CONNECTOR_CREDS_AWS_ACCESS_KEY_ID }}
+ AWS_REGION: ${{ secrets.CONNECTOR_CREDS_AWS_REGION }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.CONNECTOR_CREDS_AWS_SECRET_ACCESS_KEY }}
CONNECTOR_AUTH_PASSPHRASE: ${{ secrets.CONNECTOR_AUTH_PASSPHRASE }}
+ CONNECTOR_CREDS_S3_BUCKET_URI: ${{ secrets.CONNECTOR_CREDS_S3_BUCKET_URI }}
+ DESTINATION_FILE_NAME: "connector_auth.toml.gpg"
+ S3_SOURCE_FILE_NAME: "cf05a6ab-525e-4888-98b3-3b4a443b87c0.toml.gpg"
shell: bash
- run: ./scripts/decrypt_connector_auth.sh
+ run: |
+ mkdir -p "${HOME}/target/secrets" "${HOME}/target/test"
+
+ aws s3 cp "${CONNECTOR_CREDS_S3_BUCKET_URI}/${S3_SOURCE_FILE_NAME}" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
+ gpg --quiet --batch --yes --decrypt --passphrase="${CONNECTOR_AUTH_PASSPHRASE}" --output "${HOME}/target/test/connector_auth.toml" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
- name: Set paths in env
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
id: config_path
shell: bash
run: |
- echo "CONNECTOR_AUTH_FILE_PATH=$HOME/target/test/connector_auth.toml" >> $GITHUB_ENV
+ echo "CONNECTOR_AUTH_FILE_PATH=${HOME}/target/test/connector_auth.toml" >> $GITHUB_ENV
- name: Fetch keys
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
env:
TOML_PATH: "./config/development.toml"
run: |
- LOCAL_ADMIN_API_KEY=$(yq '.secrets.admin_api_key' $TOML_PATH)
- echo "ADMIN_API_KEY=$LOCAL_ADMIN_API_KEY" >> $GITHUB_ENV
+ LOCAL_ADMIN_API_KEY=$(yq '.secrets.admin_api_key' ${TOML_PATH})
+ echo "ADMIN_API_KEY=${LOCAL_ADMIN_API_KEY}" >> $GITHUB_ENV
- name: Install Rust
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
@@ -82,11 +92,11 @@ jobs:
- name: Build and Cache Rust Dependencies
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
- uses: Swatinem/rust-cache@v2.4.0
+ uses: Swatinem/rust-cache@v2.7.0
- name: Install Diesel CLI with Postgres Support
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
- uses: baptiste0928/cargo-install@v2.1.0
+ uses: baptiste0928/cargo-install@v2.2.0
with:
crate: diesel_cli
features: postgres
@@ -118,7 +128,7 @@ jobs:
while ! nc -z localhost 8080; do
if [ $COUNT -gt 12 ]; then # Wait for up to 2 minutes (12 * 10 seconds)
echo "Server did not start within a reasonable time. Exiting."
- kill $SERVER_PID
+ kill ${SERVER_PID}
exit 1
else
COUNT=$((COUNT+1))
@@ -141,10 +151,10 @@ jobs:
export PATH=${NEWMAN_PATH}:${PATH}
failed_connectors=()
- for i in $(echo "$CONNECTORS" | tr "," "\n"); do
- echo $i
- if ! cargo run --bin test_utils -- --connector-name="$i" --base-url="$BASE_URL" --admin-api-key="$ADMIN_API_KEY"; then
- failed_connectors+=("$i")
+ for i in $(echo "${CONNECTORS}" | tr "," "\n"); do
+ echo "${i}"
+ if ! cargo run --bin test_utils -- --connector-name="${i}" --base-url="${BASE_URL}" --admin-api-key="${ADMIN_API_KEY}"; then
+ failed_connectors+=("${i}")
fi
done
diff --git a/.github/workflows/pr-convention-checks.yml b/.github/workflows/pr-convention-checks.yml
new file mode 100644
index 000000000000..37732e7c548c
--- /dev/null
+++ b/.github/workflows/pr-convention-checks.yml
@@ -0,0 +1,128 @@
+name: Pull Request Convention Checks
+
+on:
+ # This is a dangerous event trigger as it causes the workflow to run in the
+ # context of the target repository.
+ # Avoid checking out the head of the pull request or building code from the
+ # pull request whenever this trigger is used.
+ # Since we do not have a checkout step in this workflow, this is an
+ # acceptable use of this trigger.
+ pull_request_target:
+ types:
+ - opened
+ - edited
+ - reopened
+ - ready_for_review
+ - synchronize
+
+ merge_group:
+ types:
+ - checks_requested
+
+env:
+ # Allow more retries for network requests in cargo (downloading crates) and
+ # rustup (installing toolchains). This should help to reduce flaky CI failures
+ # from transient network timeouts or other issues.
+ CARGO_NET_RETRY: 10
+ RUSTUP_MAX_RETRIES: 10
+
+jobs:
+ pr_title_conventional_commit_check:
+ name: Verify PR title follows conventional commit standards
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: stable
+
+ - uses: baptiste0928/cargo-install@v2.2.0
+ with:
+ crate: cocogitto
+
+ - name: Verify PR title follows conventional commit standards
+ if: ${{ github.event_name == 'pull_request_target' }}
+ shell: bash
+ env:
+ TITLE: ${{ github.event.pull_request.title }}
+ run: cog verify "$TITLE"
+
+ - name: Verify commit message follows conventional commit standards
+ if: ${{ github.event_name == 'merge_group' }}
+ shell: bash
+ run: cog verify '${{ github.event.merge_group.head_commit.message }}'
+
+ pr_linked_issues_check:
+ name: Verify PR contains one or more linked issues
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Skip check for merge queue
+ if: ${{ github.event_name == 'merge_group' }}
+ shell: bash
+ run: echo "Skipping PR linked issues check for merge queue"
+
+ - name: Generate GitHub app token
+ id: generate_app_token
+ if: ${{ github.event_name == 'pull_request_target' }}
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+ owner: ${{ github.event.repository.owner.login }}
+
+ - name: Verify PR contains one or more linked issues
+ if: ${{ github.event_name == 'pull_request_target' }}
+ shell: bash
+ env:
+ GH_TOKEN: ${{ steps.generate_app_token.outputs.token }}
+ run: |
+ # GitHub does not provide information about linked issues for a pull request via the REST API.
+ # This information is available only within the GraphQL API.
+
+ # Obtain issue number and repository name with owner (in the `owner/repo` format) for all linked issues
+ query='query ($owner: String!, $repository: String!, $prNumber: Int!) {
+ repository(owner: $owner, name: $repository) {
+ pullRequest(number: $prNumber) {
+ closingIssuesReferences(first: 10) {
+ nodes {
+ number
+ repository {
+ nameWithOwner
+ }
+ }
+ }
+ }
+ }
+ }'
+
+ # Obtain linked issues in the `owner/repo#issue_number` format, one issue per line.
+ # The variable contains an empty string if the pull request has no linked issues.
+ linked_issues="$(
+ gh api graphql \
+ --raw-field "query=${query}" \
+ --field 'owner=${{ github.event.repository.owner.login }}' \
+ --field 'repository=${{ github.event.repository.name }}' \
+ --field 'prNumber=${{ github.event.pull_request.number }}' \
+ --jq '.data.repository.pullRequest.closingIssuesReferences.nodes[] | "\(.repository.nameWithOwner)#\(.number)"'
+ )"
+
+ if [[ -z "${linked_issues}" ]]; then
+ echo "::error::PR does not contain any linked issues"
+ exit 1
+ else
+ echo "PR contains at least one linked issue"
+ fi
+
+ while IFS= read -r issue; do
+ # Split `${issue}` by `#` to obtain repository with owner (in `owner/repository` format) and issue number
+ IFS='#' read -r repository_with_owner issue_number <<< "${issue}"
+ issue_state="$(gh issue view --repo "${repository_with_owner}" --json 'state' "${issue_number}" --jq '.state')"
+
+ # Transform `${issue_state}` to lowercase for comparison
+ if [[ "${issue_state,,}" != 'open' ]]; then
+ echo "::error::At least one of the linked issues is not open"
+ exit 1
+ fi
+ done <<< "${linked_issues}"
diff --git a/.github/workflows/pr-title-spell-check.yml b/.github/workflows/pr-title-spell-check.yml
index 6ab6f184739d..03b5a8758870 100644
--- a/.github/workflows/pr-title-spell-check.yml
+++ b/.github/workflows/pr-title-spell-check.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Store PR title in a file
shell: bash
diff --git a/.github/workflows/release-new-version.yml b/.github/workflows/release-new-version.yml
deleted file mode 100644
index 872c207e8aa3..000000000000
--- a/.github/workflows/release-new-version.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-name: Release a new version
-
-on:
- schedule:
- - cron: "30 14 * * 0-4" # Run workflow at 8 PM IST every Sunday-Thursday
-
- workflow_dispatch:
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.ref }}
- cancel-in-progress: true
-
-env:
- # Allow more retries for network requests in cargo (downloading crates) and
- # rustup (installing toolchains). This should help to reduce flaky CI failures
- # from transient network timeouts or other issues.
- CARGO_NET_RETRY: 10
- RUSTUP_MAX_RETRIES: 10
-
-jobs:
- create-release:
- name: Release a new version
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
- token: ${{ secrets.AUTO_RELEASE_PAT }}
-
- - name: Install Rust
- uses: dtolnay/rust-toolchain@master
- with:
- toolchain: stable 2 weeks ago
-
- - name: Install cocogitto
- uses: baptiste0928/cargo-install@v2.1.0
- with:
- crate: cocogitto
- version: 5.4.0
-
- - name: Install git-cliff
- uses: baptiste0928/cargo-install@v2.1.0
- with:
- crate: git-cliff
- version: 1.2.0
-
- - name: Install changelog-gh-usernames
- uses: baptiste0928/cargo-install@v2.1.0
- with:
- crate: changelog-gh-usernames
- git: https://github.com/SanchithHegde/changelog-gh-usernames
- rev: dab6da3ff99dbbff8650c114984c4d8be5161ac8
-
- - name: Set Git Configuration
- shell: bash
- run: |
- git config --local user.name 'github-actions'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
-
- - name: Update Postman collection files from Postman directories
- shell: bash
- run: |
- # maybe we need to move this package.json as we need it in multiple workflows
- npm ci
- POSTMAN_DIR=postman/collection-dir
- POSTMAN_JSON_DIR=postman/collection-json
- NEWMAN_PATH=$(pwd)/node_modules/.bin
- export PATH=${NEWMAN_PATH}:${PATH}
- # generate postman collections for all postman directories
- for connector_dir in ${POSTMAN_DIR}/*
- do
- connector=$(basename ${connector_dir})
- newman dir-import ${POSTMAN_DIR}/${connector} -o ${POSTMAN_JSON_DIR}/${connector}.postman_collection.json
- done
-
- if git add postman && ! git diff --staged --quiet postman; then
- git commit --message 'test(postman): update postman collection files'
- echo "Changes detected and commited."
- fi
-
- - name: Obtain previous and new tag information
- shell: bash
- # Only consider tags on current branch when setting PREVIOUS_TAG
- run: |
- PREVIOUS_TAG="$(git tag --sort='version:refname' --merged | tail --lines 1)"
- if [[ "$(cog bump --auto --dry-run)" == *"No conventional commits for your repository that required a bump"* ]]; then
- NEW_TAG="$(cog bump --patch --dry-run)"
- elif [[ "${PREVIOUS_TAG}" != "${NEW_TAG}" ]]; then
- NEW_TAG="$(cog bump --auto --dry-run)"
- fi
- echo "NEW_TAG=${NEW_TAG}" >> $GITHUB_ENV
- echo "PREVIOUS_TAG=${PREVIOUS_TAG}" >> $GITHUB_ENV
-
- - name: Update changelog and create tag
- shell: bash
- if: ${{ env.NEW_TAG != env.PREVIOUS_TAG }}
- # Remove prefix 'v' from 'NEW_TAG' as cog bump --version expects only the version number
- run: |
- cog bump --version ${NEW_TAG#v}
-
- - name: Push created commit and tag
- shell: bash
- if: ${{ env.NEW_TAG != env.PREVIOUS_TAG }}
- run: |
- git push
- git push --tags
-
- - name: Generate release notes and create GitHub release
- shell: bash
- if: ${{ env.NEW_TAG != env.PREVIOUS_TAG }}
- env:
- GITHUB_TOKEN: ${{ github.token }}
- GH_TOKEN: ${{ secrets.AUTO_RELEASE_PAT }}
- # Need to consider commits inclusive of previous tag to generate diff link between versions.
- # This would also then require us to remove the last few lines from the changelog.
- run: |
- git-cliff --config .github/git-cliff-release.toml "${PREVIOUS_TAG}^..${NEW_TAG}" | changelog-gh-usernames | sed "/## ${PREVIOUS_TAG#v}/,\$d" > release-notes.md
- gh release create "${NEW_TAG}" --notes-file release-notes.md --verify-tag --title "Hyperswitch ${NEW_TAG}"
diff --git a/.github/workflows/release-nightly-version-reusable.yml b/.github/workflows/release-nightly-version-reusable.yml
new file mode 100644
index 000000000000..f982a699895a
--- /dev/null
+++ b/.github/workflows/release-nightly-version-reusable.yml
@@ -0,0 +1,158 @@
+name: Create a nightly tag
+
+on:
+ workflow_call:
+ secrets:
+ token:
+ description: GitHub token for authenticating with GitHub
+ required: true
+ outputs:
+ tag:
+ description: The tag that was created by the workflow
+ value: ${{ jobs.create-nightly-tag.outputs.tag }}
+
+env:
+ # Allow more retries for network requests in cargo (downloading crates) and
+ # rustup (installing toolchains). This should help to reduce flaky CI failures
+ # from transient network timeouts or other issues.
+ CARGO_NET_RETRY: 10
+ RUSTUP_MAX_RETRIES: 10
+
+ # The branch name that this workflow is allowed to run on.
+ # If the workflow is run on any other branch, this workflow will fail.
+ ALLOWED_BRANCH_NAME: main
+
+jobs:
+ create-nightly-tag:
+ name: Create a nightly tag
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ token: ${{ secrets.token }}
+
+ - name: Check if the workflow is run on an allowed branch
+ shell: bash
+ run: |
+ if [[ "${{ github.ref }}" != "refs/heads/${ALLOWED_BRANCH_NAME}" ]]; then
+ echo "::error::This workflow is expected to be run from the '${ALLOWED_BRANCH_NAME}' branch. Current branch: '${{ github.ref }}'"
+ exit 1
+ fi
+
+ - name: Check if the latest commit is a tag
+ shell: bash
+ run: |
+ if [[ -n "$(git tag --points-at HEAD)" ]]; then
+ echo "::error::The latest commit on the branch is already a tag"
+ exit 1
+ fi
+
+ # Pulling latest changes in case pre-release steps push new commits
+ - name: Pull allowed branch
+ shell: bash
+ run: git pull
+
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: stable
+
+ - name: Install git-cliff
+ uses: baptiste0928/cargo-install@v2.2.0
+ with:
+ crate: git-cliff
+ version: 1.4.0
+
+ - name: Obtain previous and next tag information
+ shell: bash
+ run: |
+ # Calendar versioning format followed: `YYYY.0M.0D.MICRO`
+ # - MICRO version number starts from 0 (to allow for multiple tags in a single day)
+ # - Hotfixes or patches can be suffixed as `-hotfix1` or `-patch1` after the MICRO version number
+
+ CURRENT_UTC_DATE="$(date --utc '+%04Y.%02m.%02d')"
+
+ # Check if any tags exist on the current branch which contain the current UTC date
+ if ! git tag --merged | grep --quiet "${CURRENT_UTC_DATE}"; then
+ # Search for date-like tags (no strict checking), sort and obtain previous tag
+ PREVIOUS_TAG="$(
+ git tag --merged \
+ | grep --extended-regexp '[0-9]{4}\.[0-9]{2}\.[0-9]{2}' \
+ | sort --version-sort \
+ | tail --lines 1
+ )"
+
+ # No tags exist with the current date; the next tag uses the current date with micro version number 0
+ NEXT_MICRO_VERSION_NUMBER='0'
+ NEXT_TAG="${CURRENT_UTC_DATE}.${NEXT_MICRO_VERSION_NUMBER}"
+
+ else
+ # Some tags exist with current date, find out latest micro version number
+ PREVIOUS_TAG="$(
+ git tag --merged \
+ | grep "${CURRENT_UTC_DATE}" \
+ | sort --version-sort \
+ | tail --lines 1
+ )"
+ PREVIOUS_MICRO_VERSION_NUMBER="$(
+ echo -n "${PREVIOUS_TAG}" \
+ | sed --regexp-extended 's/[0-9]{4}\.[0-9]{2}\.[0-9]{2}(\.([0-9]+))?(-(.+))?/\2/g'
+ )"
+ # ^^^^^^^^ ^^^^^^^^ ^^^^^^^^ ^^^^^^ ^^^^
+ # YEAR MONTH DAY MICRO Any suffix, say `hotfix1`
+ #
+ # The 2nd capture group contains the micro version number
+
+ if [[ -z "${PREVIOUS_MICRO_VERSION_NUMBER}" ]]; then
+ # Micro version number is empty, set next micro version as 1
+ NEXT_MICRO_VERSION_NUMBER='1'
+ else
+ # Increment previous micro version by 1 and set it as next micro version
+ NEXT_MICRO_VERSION_NUMBER="$((PREVIOUS_MICRO_VERSION_NUMBER + 1))"
+ fi
+
+ NEXT_TAG="${CURRENT_UTC_DATE}.${NEXT_MICRO_VERSION_NUMBER}"
+ fi
+
+ echo "PREVIOUS_TAG=${PREVIOUS_TAG}" >> $GITHUB_ENV
+ echo "NEXT_TAG=${NEXT_TAG}" >> $GITHUB_ENV
+
+ - name: Generate changelog
+ shell: bash
+ run: |
+ # Generate changelog content and store it in `release-notes.md`
+ git-cliff --config '.github/git-cliff-changelog.toml' --strip header --tag "${NEXT_TAG}" "${PREVIOUS_TAG}^.." \
+ | sed "/## ${PREVIOUS_TAG}\$/,\$d" \
+ | sed '$s/$/\n- - -/' > release-notes.md
+
+ # Append release notes after the specified pattern in `CHANGELOG.md`
+ sed --in-place '0,/^- - -/!b; /^- - -/{
+ a
+ r release-notes.md
+ }' CHANGELOG.md
+ rm release-notes.md
+
+ - name: Set git configuration
+ shell: bash
+ run: |
+ git config --local user.name 'github-actions'
+ git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+
+ - name: Commit, tag and push generated changelog
+ shell: bash
+ run: |
+ git add CHANGELOG.md
+ git commit --message "chore(version): ${NEXT_TAG}"
+
+ git tag "${NEXT_TAG}" HEAD
+
+ git push origin "${ALLOWED_BRANCH_NAME}"
+ git push origin "${NEXT_TAG}"
+
+ - name: Set job outputs
+ shell: bash
+ run: |
+ echo "tag=${NEXT_TAG}" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/release-nightly-version.yml b/.github/workflows/release-nightly-version.yml
new file mode 100644
index 000000000000..13e844e7c5d7
--- /dev/null
+++ b/.github/workflows/release-nightly-version.yml
@@ -0,0 +1,99 @@
+name: Create a nightly tag
+
+on:
+ schedule:
+ - cron: "0 0 * * 1-5" # Run workflow at 00:00 midnight UTC (05:30 AM IST) every Monday-Friday
+
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+env:
+ # Allow more retries for network requests in cargo (downloading crates) and
+ # rustup (installing toolchains). This should help to reduce flaky CI failures
+ # from transient network timeouts or other issues.
+ CARGO_NET_RETRY: 10
+ RUSTUP_MAX_RETRIES: 10
+
+ # The branch name that this workflow is allowed to run on.
+ # If the workflow is run on any other branch, this workflow will fail.
+ ALLOWED_BRANCH_NAME: main
+
+jobs:
+ update-postman-collections:
+ name: Update Postman collection JSON files
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ token: ${{ secrets.AUTO_RELEASE_PAT }}
+
+ - name: Check if the workflow is run on an allowed branch
+ shell: bash
+ run: |
+ if [[ "${{ github.ref }}" != "refs/heads/${ALLOWED_BRANCH_NAME}" ]]; then
+ echo "::error::This workflow is expected to be run from the '${ALLOWED_BRANCH_NAME}' branch. Current branch: '${{ github.ref }}'"
+ exit 1
+ fi
+
+ - name: Check if the latest commit is a tag
+ shell: bash
+ run: |
+ if [[ -n "$(git tag --points-at HEAD)" ]]; then
+ echo "::error::The latest commit on the branch is already a tag"
+ exit 1
+ fi
+
+ - name: Update Postman collection files from Postman directories
+ shell: bash
+ run: |
+ # TODO: consider moving this package.json, since it is needed in multiple workflows
+ npm ci
+
+ POSTMAN_DIR="postman/collection-dir"
+ POSTMAN_JSON_DIR="postman/collection-json"
+ NEWMAN_PATH="$(pwd)/node_modules/.bin"
+ export PATH="${NEWMAN_PATH}:${PATH}"
+
+ # generate Postman collection JSON files for all Postman collection directories
+ for connector_dir in "${POSTMAN_DIR}"/*
+ do
+ connector="$(basename "${connector_dir}")"
+ newman dir-import "${POSTMAN_DIR}/${connector}" -o "${POSTMAN_JSON_DIR}/${connector}.postman_collection.json"
+ done
+
+ if git add postman && ! git diff --staged --quiet postman; then
+ echo "POSTMAN_COLLECTION_FILES_UPDATED=true" >> $GITHUB_ENV
+ echo "Postman collection files have been modified"
+ else
+ echo "Postman collection files have no modifications"
+ fi
+
+ - name: Set git configuration
+ shell: bash
+ if: ${{ env.POSTMAN_COLLECTION_FILES_UPDATED == 'true' }}
+ run: |
+ git config --local user.name 'github-actions'
+ git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+
+ - name: Commit and push updated Postman collections if modified
+ shell: bash
+ if: ${{ env.POSTMAN_COLLECTION_FILES_UPDATED == 'true' }}
+ run: |
+ git add postman
+ git commit --message 'chore(postman): update Postman collection files'
+
+ git push origin "${ALLOWED_BRANCH_NAME}"
+
+ create-nightly-tag:
+ name: Create a nightly tag
+ uses: ./.github/workflows/release-nightly-version-reusable.yml
+ needs:
+ - update-postman-collections
+ secrets:
+ token: ${{ secrets.AUTO_RELEASE_PAT }}
diff --git a/.github/workflows/release-stable-version.yml b/.github/workflows/release-stable-version.yml
new file mode 100644
index 000000000000..93bd71ef7795
--- /dev/null
+++ b/.github/workflows/release-stable-version.yml
@@ -0,0 +1,154 @@
+name: Release a stable version
+
+on:
+ workflow_dispatch:
+ inputs:
+ bump_type:
+ description: The part of the semantic version to bump.
+ required: true
+ type: choice
+ options:
+ - patch
+ - minor
+
+jobs:
+ create-semver-tag:
+ name: Create a SemVer tag
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Generate GitHub app token
+ id: generate_app_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Check if the input is a valid CalVer tag
+ shell: bash
+ run: |
+ if [[ ${{github.ref}} =~ ^refs/tags/[0-9]{4}\.[0-9]{2}\.[0-9]{2}(\.([0-9]+))?(-(.+))?$ ]]; then
+ echo "${{github.ref}} is a valid CalVer tag."
+ else
+ echo "::error::${{github.ref}} is not a valid CalVer tag."
+ exit 1
+ fi
+
+ - name: Check if user is authorized to trigger workflow
+ shell: bash
+ env:
+ GH_TOKEN: ${{ steps.generate_app_token.outputs.token }}
+ run: |
+ echo "::add-mask::${GH_TOKEN}"
+
+ function is_user_team_member() {
+ username="${1}"
+ team_slug="${2}"
+ org_name=${{ github.repository_owner }}
+
+ # We obtain the HTTP status code since the API returns:
+ # - 200 status code if the user is a member of the specified team
+ # - 404 status code if the user is not a member of the specified team
+ #
+ # We cannot use the GitHub CLI since it does not seem to provide a way to obtain
+ # only the HTTP status code (yet).
+ status_code="$(
+ curl \
+ --location \
+ --silent \
+ --output /dev/null \
+ --write-out '%{http_code}' \
+ --header 'Accept: application/vnd.github+json' \
+ --header 'X-GitHub-Api-Version: 2022-11-28' \
+ --header "Authorization: Bearer ${GH_TOKEN}" \
+ "https://api.github.com/orgs/${org_name}/teams/${team_slug}/memberships/${username}"
+ )"
+
+ # Returns a boolean value, allowing it to be directly used in if conditions
+ [[ status_code -eq 200 ]]
+ }
+
+ allowed_teams=('hyperswitch-admins' 'hyperswitch-maintainers')
+ is_user_authorized=false
+ username=${{ github.triggering_actor }}
+
+ for team in "${allowed_teams[@]}"; do
+ if is_user_team_member "${username}" "${team}"; then
+ is_user_authorized=true
+ break
+ fi
+ done
+
+ if ${is_user_authorized}; then
+ echo "${username} is authorized to trigger workflow"
+ else
+ printf -v allowed_teams_comma_separated '%s, ' "${allowed_teams[@]}"
+ echo "::error::${username} is not authorized to trigger workflow; must be a member of one of these teams: ${allowed_teams_comma_separated%, }"
+ exit 1
+ fi
+
+ - name: Install Rust
+ uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: stable
+
+ - name: Install git-cliff
+ uses: baptiste0928/cargo-install@v2.2.0
+ with:
+ crate: git-cliff
+ version: 1.4.0
+
+ - name: Install convco
+ uses: baptiste0928/cargo-install@v2.2.0
+ with:
+ crate: convco
+ version: 0.5.0
+
+ - name: Obtain previous and next tag information
+ shell: bash
+ run: |
+ PREVIOUS_TAG="v$(convco version --prefix 'v')"
+ NEXT_TAG="v$(convco version --prefix 'v' "--${{ inputs.bump_type }}")"
+
+ echo "PREVIOUS_TAG=${PREVIOUS_TAG}" >> $GITHUB_ENV
+ echo "NEXT_TAG=${NEXT_TAG}" >> $GITHUB_ENV
+
+ # We make use of GitHub API calls to create the tag to have signed tags
+ - name: Create SemVer tag
+ shell: bash
+ env:
+ GH_TOKEN: ${{ steps.generate_app_token.outputs.token }}
+ run: |
+ # Create a lightweight tag to point to the checked out CalVer tag
+ gh api \
+ --method POST \
+ --header 'Accept: application/vnd.github+json' \
+ --header 'X-GitHub-Api-Version: 2022-11-28' \
+ '/repos/{owner}/{repo}/git/refs' \
+ --raw-field "ref=refs/tags/${NEXT_TAG}" \
+ --raw-field 'sha=${{ github.sha }}'
+
+ - name: Generate changelog
+ shell: bash
+ run: |
+ # Override git-cliff tag pattern to only consider SemVer tags
+ export GIT_CLIFF__GIT__TAG_PATTERN='v[0-9]*'
+
+ # Update heading format in git-cliff changelog template to include date
+ sed -i 's/## {{ version }}/## {{ version | trim_start_matches(pat="v") }} ({{ timestamp | date(format="%Y-%m-%d") }})/' .github/git-cliff-changelog.toml
+
+ # Generate changelog content and store it in `release-notes.md`
+ git-cliff --config '.github/git-cliff-changelog.toml' --strip header --tag "${NEXT_TAG}" "${PREVIOUS_TAG}^.." \
+ | sed "/## ${PREVIOUS_TAG}\$/,\$d" > release-notes.md
+
+ - name: Upload changelog as build artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: release-notes.md
+ path: release-notes.md
+ if-no-files-found: error
diff --git a/.github/workflows/validate-openapi-spec.yml b/.github/workflows/validate-openapi-spec.yml
index 530c59c9236d..210f82064832 100644
--- a/.github/workflows/validate-openapi-spec.yml
+++ b/.github/workflows/validate-openapi-spec.yml
@@ -16,24 +16,32 @@ jobs:
name: Validate generated OpenAPI spec file
runs-on: ubuntu-latest
steps:
+ - name: Generate a token
+ if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }}
+ id: generate_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.HYPERSWITCH_BOT_APP_ID }}
+ private-key: ${{ secrets.HYPERSWITCH_BOT_APP_PRIVATE_KEY }}
+
- name: Checkout PR from fork
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- name: Checkout PR with token
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- token: ${{ secrets.AUTO_FILE_UPDATE_PAT }}
+ token: ${{ steps.generate_token.outputs.token }}
- name: Checkout merge group HEAD commit
if: ${{ github.event_name == 'merge_group' }}
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ github.event.merge_group.head_sha }}
@@ -44,7 +52,7 @@ jobs:
- name: Generate the OpenAPI spec file
shell: bash
- run: cargo run --features openapi -- generate-openapi-spec
+ run: cargo run -p openapi
- name: Install `swagger-cli`
shell: bash
@@ -60,8 +68,8 @@ jobs:
shell: bash
run: |
if ! git diff --quiet --exit-code -- openapi/openapi_spec.json ; then
- git config --local user.name 'github-actions[bot]'
- git config --local user.email '41898282+github-actions[bot]@users.noreply.github.com'
+ git config --local user.name 'hyperswitch-bot[bot]'
+ git config --local user.email '148525504+hyperswitch-bot[bot]@users.noreply.github.com'
git add openapi/openapi_spec.json
git commit --message 'docs(openapi): re-generate OpenAPI specification'
git push
diff --git a/.gitignore b/.gitignore
index 62804a712fa1..81ef10ad2133 100644
--- a/.gitignore
+++ b/.gitignore
@@ -261,7 +261,3 @@ result*
# node_modules
node_modules/
-
-**/connector_auth.toml
-**/sample_auth.toml
-**/auth.toml
diff --git a/.typos.toml b/.typos.toml
index 1ac38a005c9e..40acb1305892 100644
--- a/.typos.toml
+++ b/.typos.toml
@@ -24,6 +24,7 @@ optin = "optin" # Boku preflow name
optin_id = "optin_id" # Boku's id for optin flow
deriver = "deriver"
Deriver = "Deriver"
+requestor_card_reference = "requestor_card_reference"
[default.extend-words]
aci = "aci" # Name of a connector
@@ -35,6 +36,7 @@ ba = "ba" # ignore minor commit conversions
ede = "ede" # ignore minor commit conversions
daa = "daa" # Commit id
afe = "afe" # Commit id
+Hashi = "Hashi" # HashiCorp
[files]
extend-exclude = [
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 427fa7403e4c..117d4cd90e24 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,1211 @@ All notable changes to HyperSwitch will be documented here.
- - -
+## 2024.02.01.0
+
+### Features
+
+- **dashboard_metadata:** Add email alert for Prod Intent ([#3482](https://github.com/juspay/hyperswitch/pull/3482)) ([`94cd7b6`](https://github.com/juspay/hyperswitch/commit/94cd7b689758a71e13a3eaa655335e658d13afc8))
+- **pm_list:** Add required fields for google pay ([#3196](https://github.com/juspay/hyperswitch/pull/3196)) ([`7f2c434`](https://github.com/juspay/hyperswitch/commit/7f2c434bd29d337dadde8b71a9137797f1c03ec0))
+
+### Bug Fixes
+
+- **configs:** Add configs for Payme 3DS ([#3415](https://github.com/juspay/hyperswitch/pull/3415)) ([`58771b8`](https://github.com/juspay/hyperswitch/commit/58771b8985a53c83185805f770fee26c5836c645))
+
+### Refactors
+
+- **connector:**
+ - [NMI] change error message from not supported to not implemented ([#2848](https://github.com/juspay/hyperswitch/pull/2848)) ([`7575341`](https://github.com/juspay/hyperswitch/commit/757534104ee0411a887c993e45cc1fb883e82992))
+ - [Paypal] Change error message from NotSupported to NotImplemented ([#2877](https://github.com/juspay/hyperswitch/pull/2877)) ([`7251f64`](https://github.com/juspay/hyperswitch/commit/7251f6474fdac3575202971e55638c435ca5c4c8))
+ - [Adyen] change expiresAt time from string to unixtimestamp ([#3506](https://github.com/juspay/hyperswitch/pull/3506)) ([`b7c0f9a`](https://github.com/juspay/hyperswitch/commit/b7c0f9aa098c880314a529bc10015256ce2139f7))
+
+### Miscellaneous Tasks
+
+- **connector_events_fields:** Added refund_id, dispute_id to connector events ([#3424](https://github.com/juspay/hyperswitch/pull/3424)) ([`90a2462`](https://github.com/juspay/hyperswitch/commit/90a24625ce312e4e7681cf4cc470e6365a052f8a))
+
+**Full Changelog:** [`2024.01.31.1...2024.02.01.0`](https://github.com/juspay/hyperswitch/compare/2024.01.31.1...2024.02.01.0)
+
+- - -
+
+## 2024.01.31.1
+
+### Features
+
+- **users:**
+ - Added blacklist for users ([#3469](https://github.com/juspay/hyperswitch/pull/3469)) ([`e331d2d`](https://github.com/juspay/hyperswitch/commit/e331d2d5569405b89052c6bb59f7e755523f6f15))
+ - Add `merchant_id` in `EmailToken` and change user status in reset password ([#3473](https://github.com/juspay/hyperswitch/pull/3473)) ([`db3d53f`](https://github.com/juspay/hyperswitch/commit/db3d53ff1d8b42d107fafe7a6efe7ec9f155d5a0))
+- Add deep health check for analytics ([#3438](https://github.com/juspay/hyperswitch/pull/3438)) ([`7597f3b`](https://github.com/juspay/hyperswitch/commit/7597f3b692124a762c3b212b604938be2d64175a))
+
+### Bug Fixes
+
+- **connector:** [Trustpay] add merchant_id in gpay session response for trustpay ([#3471](https://github.com/juspay/hyperswitch/pull/3471)) ([`20568dc`](https://github.com/juspay/hyperswitch/commit/20568dc976687b8b2bfba12ab2db8926cf1c14ed))
+
+### Miscellaneous Tasks
+
+- **postman:** Update Postman collection files ([`a4b9782`](https://github.com/juspay/hyperswitch/commit/a4b97828be103d601a5007f8e4274837faa6886f))
+
+**Full Changelog:** [`2024.01.31.0...2024.01.31.1`](https://github.com/juspay/hyperswitch/compare/2024.01.31.0...2024.01.31.1)
+
+- - -
+
+## 2024.01.31.0
+
+### Features
+
+- **connector:** [noon] add revoke mandate ([#3487](https://github.com/juspay/hyperswitch/pull/3487)) ([`b5bc8c4`](https://github.com/juspay/hyperswitch/commit/b5bc8c4e7cfdde8251ed0e2e3835ed5e3f1435c4))
+
+### Bug Fixes
+
+- **connector:** [BOA/Cybersource] Handle Invalid Api Secret ([#3485](https://github.com/juspay/hyperswitch/pull/3485)) ([`224c1cf`](https://github.com/juspay/hyperswitch/commit/224c1cf2a421441433097618cc1dd3db224d5915))
+- **user:** Change permission for sample data ([#3462](https://github.com/juspay/hyperswitch/pull/3462)) ([`610c1c5`](https://github.com/juspay/hyperswitch/commit/610c1c575253ddf7a1a31ef941efaae2dd676b48))
+
+### Refactors
+
+- **core:** Restrict requires_customer_action in confirm ([#3235](https://github.com/juspay/hyperswitch/pull/3235)) ([`d2accde`](https://github.com/juspay/hyperswitch/commit/d2accdef410319733d6174057bdca468bde1ae83))
+
+### Miscellaneous Tasks
+
+- **config:** [ADYEN] Add configs for PIX in WASM ([#3498](https://github.com/juspay/hyperswitch/pull/3498)) ([`9821935`](https://github.com/juspay/hyperswitch/commit/9821935933e178765b3b0d0bcbfdf4ab041c3bc2))
+
+**Full Changelog:** [`2024.01.30.1...2024.01.31.0`](https://github.com/juspay/hyperswitch/compare/2024.01.30.1...2024.01.31.0)
+
+- - -
+
+## 2024.01.30.1
+
+### Features
+
+- **config:** Add iDEAL and Sofort Env Configs ([#3492](https://github.com/juspay/hyperswitch/pull/3492)) ([`46c1822`](https://github.com/juspay/hyperswitch/commit/46c1822d0e367e59420c9d087428bc3b12794445))
+- **connector:**
+ - [Bluesnap] Metadata to connector metadata mapping ([#3331](https://github.com/juspay/hyperswitch/pull/3331)) ([`b2afdc3`](https://github.com/juspay/hyperswitch/commit/b2afdc35465426bd11428d8d4ac743617a443128))
+ - [Stripe] Metadata to connector metadata mapping ([#3295](https://github.com/juspay/hyperswitch/pull/3295)) ([`864a8d7`](https://github.com/juspay/hyperswitch/commit/864a8d7b02acda5ea593cae83594962ea249c16d))
+- **core:** Update card_details for an existing mandate ([#3452](https://github.com/juspay/hyperswitch/pull/3452)) ([`02074df`](https://github.com/juspay/hyperswitch/commit/02074dfc23f1a126e76935ba5311c6aed6590ca5))
+- **pm_list:** Add required fields for sofort ([#3192](https://github.com/juspay/hyperswitch/pull/3192)) ([`3d55e3b`](https://github.com/juspay/hyperswitch/commit/3d55e3ba45619978e8ca9e5012c156dc017d2879))
+- **users:** Signin and Verify Email changes for User Invitation changes ([#3420](https://github.com/juspay/hyperswitch/pull/3420)) ([`d91da89`](https://github.com/juspay/hyperswitch/commit/d91da89065a6870f05e1ff9db007d16a58454c84))
+
+### Bug Fixes
+
+- **logging:** Add flow to persistent logs fields ([#3472](https://github.com/juspay/hyperswitch/pull/3472)) ([`ac49103`](https://github.com/juspay/hyperswitch/commit/ac491038b16c77fc7f2249042b35dfb1d58e653d))
+- Empty payment attempts on payment retrieve ([#3447](https://github.com/juspay/hyperswitch/pull/3447)) ([`bec4f2a`](https://github.com/juspay/hyperswitch/commit/bec4f2a24e2236f7814119a6ebf0363cbf598540))
+
+### Refactors
+
+- **payment_link:** Segregated payment link in html css js files, sdk over flow issue, surcharge bug, block SPM customer call for payment link ([#3410](https://github.com/juspay/hyperswitch/pull/3410)) ([`a7bc8c6`](https://github.com/juspay/hyperswitch/commit/a7bc8c655f5b745dccd4d818ac3ceb08c3b80c0e))
+- **settings:** Make the function to deserialize hashsets more generic ([#3104](https://github.com/juspay/hyperswitch/pull/3104)) ([`87191d6`](https://github.com/juspay/hyperswitch/commit/87191d687cd66bf096bfb98ffe51a805b4b76a03))
+- Add support for extending file storage to other schemes and provide a runtime flag for the same ([#3348](https://github.com/juspay/hyperswitch/pull/3348)) ([`a9638d1`](https://github.com/juspay/hyperswitch/commit/a9638d118e0b68653fef3bec2ce8aa3c47feedd3))
+
+### Miscellaneous Tasks
+
+- **analytics:**
+ - Adding status code to connector Kafka events ([#3393](https://github.com/juspay/hyperswitch/pull/3393)) ([`d6807ab`](https://github.com/juspay/hyperswitch/commit/d6807abba46136eabadcbfbc51bce421144dca2c))
+ - Adding dispute id to api log events ([#3450](https://github.com/juspay/hyperswitch/pull/3450)) ([`937aea9`](https://github.com/juspay/hyperswitch/commit/937aea906e759e6e8a76a424db99ed052d46b7d2))
+- **kv:** Add metrics while pushing to stream ([#3364](https://github.com/juspay/hyperswitch/pull/3364)) ([`8c0c49c`](https://github.com/juspay/hyperswitch/commit/8c0c49c6bb02d4ec58242bc90eadfb267c24481e))
+
+**Full Changelog:** [`2024.01.30.0...2024.01.30.1`](https://github.com/juspay/hyperswitch/compare/2024.01.30.0...2024.01.30.1)
+
+- - -
+
+## 2024.01.30.0
+
+### Features
+
+- **router:** Add request_details logger middleware for 400 bad requests ([#3414](https://github.com/juspay/hyperswitch/pull/3414)) ([`dd0d2dc`](https://github.com/juspay/hyperswitch/commit/dd0d2dc2dd9a6263bbb8a99d1f0b2077f38dd621))
+
+### Refactors
+
+- **openapi:** Move openapi to separate crate to decrease compile times ([#3110](https://github.com/juspay/hyperswitch/pull/3110)) ([`7d8d68f`](https://github.com/juspay/hyperswitch/commit/7d8d68faba55dfcb2886c63ae7969ebd4b9ec98c))
+
+### Miscellaneous Tasks
+
+- **configs:** [NMI] add wasm changes for prod dashboard ([#3470](https://github.com/juspay/hyperswitch/pull/3470)) ([`3fbffdc`](https://github.com/juspay/hyperswitch/commit/3fbffdc242dafe7983c542573b7c6362f99331e6))
+
+**Full Changelog:** [`2024.01.29.0...2024.01.30.0`](https://github.com/juspay/hyperswitch/compare/2024.01.29.0...2024.01.30.0)
+
+- - -
+
+## 2024.01.29.0
+
+### Features
+
+- **connector:** [Adyen] Add support for PIX Payment Method ([#3236](https://github.com/juspay/hyperswitch/pull/3236)) ([`fc6e68f`](https://github.com/juspay/hyperswitch/commit/fc6e68f7f07bf2d48466fa493596c0db02d7550a))
+- **core:**
+ - [CYBERSOURCE] Add original authorized amount in router data ([#3417](https://github.com/juspay/hyperswitch/pull/3417)) ([`47fbe48`](https://github.com/juspay/hyperswitch/commit/47fbe486cec252b8befca38f1b7ea77cc0823ee5))
+ - Add outgoing webhook for manual `partial_capture` events ([#3388](https://github.com/juspay/hyperswitch/pull/3388)) ([`d5e9866`](https://github.com/juspay/hyperswitch/commit/d5e9866b522bad3e62f6f6c0d7993f5dcc2939af))
+- **logging:** Add a logging middleware to log all api requests ([#3437](https://github.com/juspay/hyperswitch/pull/3437)) ([`c2946cf`](https://github.com/juspay/hyperswitch/commit/c2946cfe05ffa81a66643e04eff5e89b545d2d43))
+- **user:**
+ - Add support to delete user ([#3374](https://github.com/juspay/hyperswitch/pull/3374)) ([`7777710`](https://github.com/juspay/hyperswitch/commit/777771048a8144aac9e2f837c85531e139ecc125))
+ - Support multiple invites ([#3422](https://github.com/juspay/hyperswitch/pull/3422)) ([`a59ac7d`](https://github.com/juspay/hyperswitch/commit/a59ac7d5b98f27f5fb34206c20ef9c37a07259a3))
+
+### Bug Fixes
+
+- **connector:**
+ - Use `ConnectorError::InvalidConnectorConfig` for an invalid `CoinbaseConnectorMeta` ([#3168](https://github.com/juspay/hyperswitch/pull/3168)) ([`d827c9a`](https://github.com/juspay/hyperswitch/commit/d827c9af29b8516f379e648e00f4ab307ae1a34d))
+ - Fix connector template script ([#3453](https://github.com/juspay/hyperswitch/pull/3453)) ([`9a54838`](https://github.com/juspay/hyperswitch/commit/9a54838b0529013ab8f449ec6b347a104b55f8f7))
+ - [HELCIM] Handle 4XX Errors ([#3458](https://github.com/juspay/hyperswitch/pull/3458)) ([`ec859ea`](https://github.com/juspay/hyperswitch/commit/ec859eabbfb8a511f0fffd30a47a144fb07f2886))
+- **core:** Return surcharge in payment method list response if passed in create request ([#3363](https://github.com/juspay/hyperswitch/pull/3363)) ([`3507ad6`](https://github.com/juspay/hyperswitch/commit/3507ad60b2f1fd84d32eb4d97fe0a847db6f2045))
+- **euclid_wasm:** Include `payouts` feature in `default` features ([#3392](https://github.com/juspay/hyperswitch/pull/3392)) ([`b45e4ca`](https://github.com/juspay/hyperswitch/commit/b45e4ca2a3788823701bdeac2e2a8c1147bb071a))
+
+### Refactors
+
+- **connector:**
+ - [Iatapay] refactor authorize flow and fix payment status mapping ([#2409](https://github.com/juspay/hyperswitch/pull/2409)) ([`f0c7bb9`](https://github.com/juspay/hyperswitch/commit/f0c7bb9a5228f2ee31858fea07abe4ecee9b78a2))
+ - Use utility function to raise payment method not implemented errors ([#1871](https://github.com/juspay/hyperswitch/pull/1871)) ([`66cd5b2`](https://github.com/juspay/hyperswitch/commit/66cd5b2fc9a32085608ed34e0af477dcafe4b957))
+- **payouts:** Propagate `Not Implemented` error ([#3429](https://github.com/juspay/hyperswitch/pull/3429)) ([`5ab4437`](https://github.com/juspay/hyperswitch/commit/5ab44377b84941b8b59f9e73b1d1f0c3889eb02b))
+
+### Miscellaneous Tasks
+
+- **configs:** [Cashtocode] wasm changes for CAD, CHF currency ([#3461](https://github.com/juspay/hyperswitch/pull/3461)) ([`10055c1`](https://github.com/juspay/hyperswitch/commit/10055c1a7354faae8d0f504e0851d2046df5734a))
+
+**Full Changelog:** [`2024.01.25.0...2024.01.29.0`](https://github.com/juspay/hyperswitch/compare/2024.01.25.0...2024.01.29.0)
+
+- - -
+
+## 2024.01.25.0
+
+### Refactors
+
+- **configs:** Add configs for deployments to environments ([#3265](https://github.com/juspay/hyperswitch/pull/3265)) ([`77c1bbb`](https://github.com/juspay/hyperswitch/commit/77c1bbb5a3fe3244cd988ac1260a4a31ae7fcd20))
+
+**Full Changelog:** [`2024.01.24.1...2024.01.25.0`](https://github.com/juspay/hyperswitch/compare/2024.01.24.1...2024.01.25.0)
+
+- - -
+
+## 2024.01.24.1
+
+### Features
+
+- **hashicorp:** Implement hashicorp secrets manager solution ([#3297](https://github.com/juspay/hyperswitch/pull/3297)) ([`629d546`](https://github.com/juspay/hyperswitch/commit/629d546aa7c774e86d609abec3b3ab5cf0d100a7))
+
+### Refactors
+
+- **Router:** [Noon] revert adding new field max_amount to mandate request ([#3435](https://github.com/juspay/hyperswitch/pull/3435)) ([`4cd65a2`](https://github.com/juspay/hyperswitch/commit/4cd65a24f70fdef160eb2d87654f1e30538c3339))
+- **compatibility:** Revert add multiuse mandates support in stripe compatibility ([#3436](https://github.com/juspay/hyperswitch/pull/3436)) ([`8a019f0`](https://github.com/juspay/hyperswitch/commit/8a019f08acf74e04c3ae9c8790dd481301bdcfee))
+
+### Miscellaneous Tasks
+
+- **ckh-source:** Updated ckh analytics source tables ([#3397](https://github.com/juspay/hyperswitch/pull/3397)) ([`3f343d3`](https://github.com/juspay/hyperswitch/commit/3f343d36bff7ce8f73602a2391d205367d5581c7))
+
+**Full Changelog:** [`2024.01.24.0...2024.01.24.1`](https://github.com/juspay/hyperswitch/compare/2024.01.24.0...2024.01.24.1)
+
+- - -
+
+## 2024.01.24.0
+
+### Miscellaneous Tasks
+
+- **postman:** Update Postman collection files ([`7885b2a`](https://github.com/juspay/hyperswitch/commit/7885b2a213f474da3e018ddeb56bc6e407c48471))
+
+**Full Changelog:** [`2024.01.23.0...2024.01.24.0`](https://github.com/juspay/hyperswitch/compare/2024.01.23.0...2024.01.24.0)
+
+- - -
+
+## 2024.01.23.0
+
+### Features
+
+- **compatibility:** Add multiuse mandates support in stripe compatibility ([#3425](https://github.com/juspay/hyperswitch/pull/3425)) ([`4a8104e`](https://github.com/juspay/hyperswitch/commit/4a8104e5f8dd2cfd03de4055baf1256cb7533895))
+
+**Full Changelog:** [`2024.01.22.1...2024.01.23.0`](https://github.com/juspay/hyperswitch/compare/2024.01.22.1...2024.01.23.0)
+
+- - -
+
+## 2024.01.22.1
+
+### Features
+
+- **core:** Send `customer_name` to connectors when creating customer ([#3380](https://github.com/juspay/hyperswitch/pull/3380)) ([`7813cee`](https://github.com/juspay/hyperswitch/commit/7813ceece2081b73f1374e2ee5a9a673f0b72127))
+
+### Miscellaneous Tasks
+
+- Chore(deps): bump the cargo group across 1 directories with 3 updates ([#3409](https://github.com/juspay/hyperswitch/pull/3409)) ([`6c46e9c`](https://github.com/juspay/hyperswitch/commit/6c46e9c19b304bb11f304e60c46e8abf67accf6d))
+
+**Full Changelog:** [`2024.01.22.0...2024.01.22.1`](https://github.com/juspay/hyperswitch/compare/2024.01.22.0...2024.01.22.1)
+
+- - -
+
+## 2024.01.22.0
+
+### Features
+
+- **user_roles:** Add accept invitation API and `UserJWTAuth` ([#3365](https://github.com/juspay/hyperswitch/pull/3365)) ([`a47372a`](https://github.com/juspay/hyperswitch/commit/a47372a451b60defda35fa212565b889ed5b2d2b))
+
+### Documentation
+
+- Add link to api docs ([#3405](https://github.com/juspay/hyperswitch/pull/3405)) ([`4e1e78e`](https://github.com/juspay/hyperswitch/commit/4e1e78ecd962f4b34fa04f611f03e8e6f6e1bd7c))
+
+**Full Changelog:** [`2024.01.19.1...2024.01.22.0`](https://github.com/juspay/hyperswitch/compare/2024.01.19.1...2024.01.22.0)
+
+- - -
+
+## 2024.01.19.1
+
+### Bug Fixes
+
+- **connector:** [CRYPTOPAY] Fix header generation for PSYNC ([#3402](https://github.com/juspay/hyperswitch/pull/3402)) ([`ec16ed0`](https://github.com/juspay/hyperswitch/commit/ec16ed0f82f258c5699d54a386f67aff06c0d144))
+- **frm:** Update FRM manual review flow ([#3176](https://github.com/juspay/hyperswitch/pull/3176)) ([`5255ba9`](https://github.com/juspay/hyperswitch/commit/5255ba9170c633899cd4c3bbe24a44b429546f15))
+
+### Refactors
+
+- Rename `s3` feature flag to `aws_s3` ([#3341](https://github.com/juspay/hyperswitch/pull/3341)) ([`1c04ac7`](https://github.com/juspay/hyperswitch/commit/1c04ac751240f5c931df0f282af1e0ad745e9509))
+
+**Full Changelog:** [`2024.01.19.0...2024.01.19.1`](https://github.com/juspay/hyperswitch/compare/2024.01.19.0...2024.01.19.1)
+
+- - -
+
+## 2024.01.19.0
+
+### Features
+
+- **users:**
+ - Add `preferred_merchant_id` column and update user details API ([#3373](https://github.com/juspay/hyperswitch/pull/3373)) ([`862a1b5`](https://github.com/juspay/hyperswitch/commit/862a1b5303ff304cca41d3553f652fd1091aab9b))
+ - Added get role from jwt api ([#3385](https://github.com/juspay/hyperswitch/pull/3385)) ([`7516a16`](https://github.com/juspay/hyperswitch/commit/7516a16763877c03ecc35fda19388bbd021c5cc7))
+
+### Refactors
+
+- **recon:** Update recipient email and mail body for ProFeatureRequest ([#3381](https://github.com/juspay/hyperswitch/pull/3381)) ([`5a791aa`](https://github.com/juspay/hyperswitch/commit/5a791aaf4dc05e8ffdb60464a03b6fc41f860581))
+
+**Full Changelog:** [`2024.01.18.1...2024.01.19.0`](https://github.com/juspay/hyperswitch/compare/2024.01.18.1...2024.01.19.0)
+
+- - -
+
+## 2024.01.18.1
+
+### Bug Fixes
+
+- **connector:**
+ - Trustpay zen error mapping ([#3255](https://github.com/juspay/hyperswitch/pull/3255)) ([`e816ccf`](https://github.com/juspay/hyperswitch/commit/e816ccfbdd7b0e24464aa93421e399d63f23b17c))
+ - [Cashtocode] update amount from i64 to f64 in webhook payload ([#3382](https://github.com/juspay/hyperswitch/pull/3382)) ([`059e866`](https://github.com/juspay/hyperswitch/commit/059e86607dc271c25bb3d23f5adfc7d5f21f62fb))
+- **metrics:** Add TASKS_ADDED_COUNT and TASKS_RESET_COUNT metrics in router scheduler flow ([#3189](https://github.com/juspay/hyperswitch/pull/3189)) ([`b4df40d`](https://github.com/juspay/hyperswitch/commit/b4df40db25f6ea743c7a25db47e8f1d8e0d544e3))
+- **user:** Fetch profile_id for sample data ([#3358](https://github.com/juspay/hyperswitch/pull/3358)) ([`2f693ad`](https://github.com/juspay/hyperswitch/commit/2f693ad1fd857280ef30c6cc0297fb926f0e79e8))
+
+### Refactors
+
+- **connector:** [Volt] Refactor Payments and Refunds Webhooks ([#3377](https://github.com/juspay/hyperswitch/pull/3377)) ([`acb3296`](https://github.com/juspay/hyperswitch/commit/acb329672297cd7337d0b0239e4c662257812e8a))
+- **core:** Add locker config to enable or disable locker ([#3352](https://github.com/juspay/hyperswitch/pull/3352)) ([`bd5356e`](https://github.com/juspay/hyperswitch/commit/bd5356e7e7cf61f9d07fe9b67c9c5bb38fddf9c7))
+
+**Full Changelog:** [`2024.01.18.0...2024.01.18.1`](https://github.com/juspay/hyperswitch/compare/2024.01.18.0...2024.01.18.1)
+
+- - -
+
+## 2024.01.18.0
+
+### Features
+
+- **connector_events:** Added api to fetch connector event logs ([#3319](https://github.com/juspay/hyperswitch/pull/3319)) ([`68a3a28`](https://github.com/juspay/hyperswitch/commit/68a3a280676c8309f9becffae545b134b5e1f2ea))
+- **payment_method:** Add capability to store bank details using /payment_methods endpoint ([#3113](https://github.com/juspay/hyperswitch/pull/3113)) ([`01c2de2`](https://github.com/juspay/hyperswitch/commit/01c2de223f60595d77c06a59a40dfe041e02cfee))
+
+### Bug Fixes
+
+- **core:** Add validation for authtype and metadata in update payment connector ([#3305](https://github.com/juspay/hyperswitch/pull/3305)) ([`52f38d3`](https://github.com/juspay/hyperswitch/commit/52f38d3d5a7d035e8211e1f51c8f982232e2d7ab))
+- **events:** Fix event generation for paymentmethods list ([#3337](https://github.com/juspay/hyperswitch/pull/3337)) ([`ac8d81b`](https://github.com/juspay/hyperswitch/commit/ac8d81b32b3d91b875113d32782a8c62e39ba2a8))
+
+### Refactors
+
+- **connector:** [cybersource] recurring mandate flow ([#3354](https://github.com/juspay/hyperswitch/pull/3354)) ([`387c1c4`](https://github.com/juspay/hyperswitch/commit/387c1c491bdc413ae361d04f0be25eaa58e72fa9))
+- [Noon] adding new field max_amount to mandate request ([#3209](https://github.com/juspay/hyperswitch/pull/3209)) ([`eb2a61d`](https://github.com/juspay/hyperswitch/commit/eb2a61d8597995838f21b8233653c691118b2191))
+
+### Miscellaneous Tasks
+
+- **router:** Remove recon from default features ([#3370](https://github.com/juspay/hyperswitch/pull/3370)) ([`928beec`](https://github.com/juspay/hyperswitch/commit/928beecdd7fe9e09b38ffe750627ca4af94ffc93))
+
+**Full Changelog:** [`2024.01.17.0...2024.01.18.0`](https://github.com/juspay/hyperswitch/compare/2024.01.17.0...2024.01.18.0)
+
+- - -
+
+## 2024.01.17.0
+
+### Features
+
+- **connector:** [BANKOFAMERICA] Implement 3DS flow for cards ([#3343](https://github.com/juspay/hyperswitch/pull/3343)) ([`d533c98`](https://github.com/juspay/hyperswitch/commit/d533c98b5107fb6876c11b183eb9bc382a77a2f1))
+- **recon:** Add recon APIs ([#3345](https://github.com/juspay/hyperswitch/pull/3345)) ([`8678f8d`](https://github.com/juspay/hyperswitch/commit/8678f8d1448b5ce430931bfbbc269ef979d9eea7))
+
+### Bug Fixes
+
+- **connector_onboarding:** Check if connector exists for the merchant account and add reset tracking id API ([#3229](https://github.com/juspay/hyperswitch/pull/3229)) ([`58cc8d6`](https://github.com/juspay/hyperswitch/commit/58cc8d6109ce49d385b06c762ab3f6670f5094eb))
+- **payment_link:** Added expires_on in payment response ([#3332](https://github.com/juspay/hyperswitch/pull/3332)) ([`5ad3f89`](https://github.com/juspay/hyperswitch/commit/5ad3f8939afafce3eec39704dcaa92270b384dcd))
+
+**Full Changelog:** [`2024.01.12.1...2024.01.17.0`](https://github.com/juspay/hyperswitch/compare/2024.01.12.1...2024.01.17.0)
+
+- - -
+
+## 2024.01.12.1
+
+### Miscellaneous Tasks
+
+- **config:** Add merchant_secret config for webhooks for cashtocode and volt in wasm dashboard ([#3333](https://github.com/juspay/hyperswitch/pull/3333)) ([`57f2cff`](https://github.com/juspay/hyperswitch/commit/57f2cff75e58b0a7811492a1fdb636f59dcefbd0))
+- Add api reference for blocklist ([#3336](https://github.com/juspay/hyperswitch/pull/3336)) ([`f381d86`](https://github.com/juspay/hyperswitch/commit/f381d86b7c9fa79d632991c74cab53d0181231c6))
+
+**Full Changelog:** [`2024.01.12.0...2024.01.12.1`](https://github.com/juspay/hyperswitch/compare/2024.01.12.0...2024.01.12.1)
+
+- - -
+
+## 2024.01.12.0
+
+### Features
+
+- **connector:**
+ - [BOA/Cyb] Include merchant metadata in capture and void requests ([#3308](https://github.com/juspay/hyperswitch/pull/3308)) ([`5a5400c`](https://github.com/juspay/hyperswitch/commit/5a5400cf5b539996b2f327c51d4a07b4a86fd1be))
+ - [Volt] Add support for refund webhooks ([#3326](https://github.com/juspay/hyperswitch/pull/3326)) ([`e376f68`](https://github.com/juspay/hyperswitch/commit/e376f68c167a289957a4372df108797088ab1f6e))
+ - [BOA/CYB] Store AVS response in connector_metadata ([#3271](https://github.com/juspay/hyperswitch/pull/3271)) ([`e75b11e`](https://github.com/juspay/hyperswitch/commit/e75b11e98ac4c8d37c842c8ee0ccf361dcb52793))
+- **euclid_wasm:** Config changes for NMI ([#3329](https://github.com/juspay/hyperswitch/pull/3329)) ([`ed07c5b`](https://github.com/juspay/hyperswitch/commit/ed07c5ba90868a3132ca90d72219db3ba8978232))
+- **outgoingwebhookevent:** Adding api for query to fetch outgoing webhook events log ([#3310](https://github.com/juspay/hyperswitch/pull/3310)) ([`54d44be`](https://github.com/juspay/hyperswitch/commit/54d44bef730c0679f3535f66e89e88139d70ba2e))
+- **payment_link:** Added sdk layout option payment link ([#3207](https://github.com/juspay/hyperswitch/pull/3207)) ([`6117652`](https://github.com/juspay/hyperswitch/commit/61176524ca0c11c605538a1da9a267837193e1ec))
+- **router:** Payment_method block ([#3056](https://github.com/juspay/hyperswitch/pull/3056)) ([`bb09613`](https://github.com/juspay/hyperswitch/commit/bb096138b5937092badd02741fb869ee35e2e3cc))
+- **users:** Invite user without email ([#3328](https://github.com/juspay/hyperswitch/pull/3328)) ([`6a47063`](https://github.com/juspay/hyperswitch/commit/6a4706323c61f3722dc543993c55084dc9ff9850))
+- **connector:** [cybersource] Implement 3DS flow for cards ([#3290](https://github.com/juspay/hyperswitch/pull/3290)) ([`6fb3b00`](https://github.com/juspay/hyperswitch/commit/6fb3b00e82d1e3c03dc1c816ffa6353cc7991a53))
+- Add support for card extended bin in payment attempt ([#3312](https://github.com/juspay/hyperswitch/pull/3312)) ([`cc3eefd`](https://github.com/juspay/hyperswitch/commit/cc3eefd317117d761cdcc76804f3510952d4cec2))
+
+### Bug Fixes
+
+- **core:** Surcharge with saved card failure ([#3318](https://github.com/juspay/hyperswitch/pull/3318)) ([`5a1a3da`](https://github.com/juspay/hyperswitch/commit/5a1a3da7502ce9e13546b896477d82719162d5b6))
+- **refund:** Add merchant_connector_id in refund ([#3303](https://github.com/juspay/hyperswitch/pull/3303)) ([`af43b07`](https://github.com/juspay/hyperswitch/commit/af43b07e4394458db478bc16e5fb8d3b0d636a31))
+- **router:** Add config to avoid connector tokenization for `apple pay` `simplified flow` ([#3234](https://github.com/juspay/hyperswitch/pull/3234)) ([`4f9c04b`](https://github.com/juspay/hyperswitch/commit/4f9c04b856761b9c0486abad4c36de191da2c460))
+- Update amount_capturable based on intent_status and payment flow ([#3278](https://github.com/juspay/hyperswitch/pull/3278)) ([`469ea20`](https://github.com/juspay/hyperswitch/commit/469ea20214aa7c1a3b4b86520724c2509ae37b0b))
+
+### Refactors
+
+- **router:**
+ - Flagged order_details validation to skip validation ([#3116](https://github.com/juspay/hyperswitch/pull/3116)) ([`8626bda`](https://github.com/juspay/hyperswitch/commit/8626bda6d5aa9e7531edc7ea50ed4f30c3b7227a))
+ - Restricted list payment method Customer to api-key based ([#3100](https://github.com/juspay/hyperswitch/pull/3100)) ([`9eaebe8`](https://github.com/juspay/hyperswitch/commit/9eaebe8db3d83105ef1e8fc784241e1fb795dd22))
+
+### Miscellaneous Tasks
+
+- Remove connector auth TOML files from `.gitignore` and `.dockerignore` ([#3330](https://github.com/juspay/hyperswitch/pull/3330)) ([`9f6ef3f`](https://github.com/juspay/hyperswitch/commit/9f6ef3f2240052053b5b7df0a13a5503d8141d56))
+
+**Full Changelog:** [`2024.01.11.0...2024.01.12.0`](https://github.com/juspay/hyperswitch/compare/2024.01.11.0...2024.01.12.0)
+
+- - -
+
+## 2024.01.11.0
+
+### Features
+
+- **core:** Add new payments webhook events ([#3212](https://github.com/juspay/hyperswitch/pull/3212)) ([`e0e28b8`](https://github.com/juspay/hyperswitch/commit/e0e28b87c0647252918ef110cd7614c46b5cf943))
+- **payment_link:** Add status page for payment link ([#3213](https://github.com/juspay/hyperswitch/pull/3213)) ([`50e4d79`](https://github.com/juspay/hyperswitch/commit/50e4d797da31b570b5920b33d77c24a21d9871e2))
+
+### Bug Fixes
+
+- **euclid_wasm:** Update braintree config prod ([#3288](https://github.com/juspay/hyperswitch/pull/3288)) ([`8830563`](https://github.com/juspay/hyperswitch/commit/8830563748ed20c40b7a21a66e9ad9fd02ddcf0e))
+
+### Refactors
+
+- **connector:** [bluesnap] add connector_txn_id fallback for webhook ([#3315](https://github.com/juspay/hyperswitch/pull/3315)) ([`a69e876`](https://github.com/juspay/hyperswitch/commit/a69e876f8212cb94202686e073005c23b1b2fc35))
+- Removed basilisk feature ([#3281](https://github.com/juspay/hyperswitch/pull/3281)) ([`612f8d9`](https://github.com/juspay/hyperswitch/commit/612f8d9d5f5bcba78aa64c3128cc72be0f2860ea))
+
+### Miscellaneous Tasks
+
+- Nits and small code improvements found during investigation of PR#3168 ([#3259](https://github.com/juspay/hyperswitch/pull/3259)) ([`fe3cf54`](https://github.com/juspay/hyperswitch/commit/fe3cf54781302c733c1682ded2c1735544407a5f))
+
+**Full Changelog:** [`2024.01.10.0...2024.01.11.0`](https://github.com/juspay/hyperswitch/compare/2024.01.10.0...2024.01.11.0)
+
+- - -
+
+## 2024.01.10.0
+
+### Features
+
+- **Connector:** [VOLT] Add support for Payments Webhooks ([#3155](https://github.com/juspay/hyperswitch/pull/3155)) ([`eba7896`](https://github.com/juspay/hyperswitch/commit/eba789640b72cdfbc17d0994d16ce111a1788fe5))
+- **pm_list:** Add required fields for Ideal ([#3183](https://github.com/juspay/hyperswitch/pull/3183)) ([`1c3c5f6`](https://github.com/juspay/hyperswitch/commit/1c3c5f6b0cff9a0037175ba92c002cdf4249108d))
+
+### Bug Fixes
+
+- **connector:**
+ - [BOA/CYB] Fix Metadata Error ([#3283](https://github.com/juspay/hyperswitch/pull/3283)) ([`71044a1`](https://github.com/juspay/hyperswitch/commit/71044a14ed87ac0cd7d2bb2009f0e59c79bd344c))
+ - [BOA, Cybersource] capture error_code ([#3239](https://github.com/juspay/hyperswitch/pull/3239)) ([`ecf51b5`](https://github.com/juspay/hyperswitch/commit/ecf51b5e3a30f055634edfafcd36f64cef535a53))
+- **outgoingwebhookevents:** Throw an error when outgoing webhook events env var not found ([#3291](https://github.com/juspay/hyperswitch/pull/3291)) ([`ee044a0`](https://github.com/juspay/hyperswitch/commit/ee044a0be811a53842c69f64c27d9995d84b7040))
+- **users:** Added merchant name in list merchants ([#3289](https://github.com/juspay/hyperswitch/pull/3289)) ([`8a354f4`](https://github.com/juspay/hyperswitch/commit/8a354f42295a3167d0e846c9522bc091ebdca3f4))
+- **wasm:** Fix failing `wasm-pack build` for `euclid_wasm` ([#3284](https://github.com/juspay/hyperswitch/pull/3284)) ([`5eb6711`](https://github.com/juspay/hyperswitch/commit/5eb67114646674fe227f073e417f26beb97e9a43))
+
+### Refactors
+
+- Pass customer object to `make_pm_data` ([#3246](https://github.com/juspay/hyperswitch/pull/3246)) ([`36c32c3`](https://github.com/juspay/hyperswitch/commit/36c32c377ae788c96b578303eae5d029e3044b7c))
+
+### Miscellaneous Tasks
+
+- **postman:** Update Postman collection files ([`8fc68ad`](https://github.com/juspay/hyperswitch/commit/8fc68adc7fb6a23d4a2970a05f5739db6010a53d))
+
+**Full Changelog:** [`2024.01.08.0...2024.01.10.0`](https://github.com/juspay/hyperswitch/compare/2024.01.08.0...2024.01.10.0)
+
+- - -
+
+## 2024.01.08.0
+
+### Features
+
+- **analytics:** Adding outgoing webhooks kafka event ([#3140](https://github.com/juspay/hyperswitch/pull/3140)) ([`1d26df2`](https://github.com/juspay/hyperswitch/commit/1d26df28bc5e1db359272b40adae70bfba9b7360))
+- **connector:** Add Revoke mandate flow ([#3261](https://github.com/juspay/hyperswitch/pull/3261)) ([`90ac26a`](https://github.com/juspay/hyperswitch/commit/90ac26a92f837568be5181108fdb1272171bbf23))
+- **merchant_account:** Add list multiple merchants in `MerchantAccountInterface` ([#3220](https://github.com/juspay/hyperswitch/pull/3220)) ([`c3172ef`](https://github.com/juspay/hyperswitch/commit/c3172ef60603325a1d9e5cab45e72d23a383e218))
+- **payments:** Add payment id in all the payment logs ([#3142](https://github.com/juspay/hyperswitch/pull/3142)) ([`7766245`](https://github.com/juspay/hyperswitch/commit/7766245478f72b0bc942922b1138c87a239be153))
+- **pm_list:** Add required fields for eps ([#3169](https://github.com/juspay/hyperswitch/pull/3169)) ([`bfd8a5a`](https://github.com/juspay/hyperswitch/commit/bfd8a5a31abb3c95cc9ca21689d5c30a6dc4ce8d))
+- Add deep health check ([#3210](https://github.com/juspay/hyperswitch/pull/3210)) ([`f30ba89`](https://github.com/juspay/hyperswitch/commit/f30ba89884d3abf2356cf1870d833a97d2411f69))
+- Include version number in response headers and on application startup ([#3045](https://github.com/juspay/hyperswitch/pull/3045)) ([`252443a`](https://github.com/juspay/hyperswitch/commit/252443a50dc48939eb08b3bcd67273bb71bbe349))
+
+### Bug Fixes
+
+- **analytics:**
+ - Fixed response code to 501 ([#3119](https://github.com/juspay/hyperswitch/pull/3119)) ([`00008c1`](https://github.com/juspay/hyperswitch/commit/00008c16c1c20f1f34381d0fc7e55ef05183e776))
+ - Added response to the connector outgoing event ([#3129](https://github.com/juspay/hyperswitch/pull/3129)) ([`d152c3a`](https://github.com/juspay/hyperswitch/commit/d152c3a1ca70c39f5c64edf63b5995f6cf02c88a))
+- **connector:**
+ - [NMI] Populating `ErrorResponse` with required fields and Mapping `connector_response_reference_id` ([#3214](https://github.com/juspay/hyperswitch/pull/3214)) ([`64babd3`](https://github.com/juspay/hyperswitch/commit/64babd34786ba8e6f63aa1dba1cbd1bc6264f2ac))
+ - [Stripe] Deserialization Error while parsing Dispute Webhook Body ([#3256](https://github.com/juspay/hyperswitch/pull/3256)) ([`01b4ac3`](https://github.com/juspay/hyperswitch/commit/01b4ac30e40a55b05fe3585d0544b21125762bc7))
+- **router:**
+ - Multiple incremental_authorizations with kv enabled ([#3185](https://github.com/juspay/hyperswitch/pull/3185)) ([`f78d02d`](https://github.com/juspay/hyperswitch/commit/f78d02d981dd7b35f2150f204b327847b811badd))
+ - Payment link api contract change ([#2975](https://github.com/juspay/hyperswitch/pull/2975)) ([`3cd7496`](https://github.com/juspay/hyperswitch/commit/3cd74966b279dc1c43935dc1bceb1c69b9eb0643))
+- **user:** Add integration_completed enum in metadata type ([#3245](https://github.com/juspay/hyperswitch/pull/3245)) ([`3ab71fb`](https://github.com/juspay/hyperswitch/commit/3ab71fbd5ac86f12cf19d17561e428d33c51a4cf))
+- **users:** Fix wrong redirection url in magic link ([#3217](https://github.com/juspay/hyperswitch/pull/3217)) ([`000e644`](https://github.com/juspay/hyperswitch/commit/000e64438838461ea930545405fb2ee0d3c4356c))
+- Introduce net_amount field in payment response ([#3115](https://github.com/juspay/hyperswitch/pull/3115)) ([`23e0c63`](https://github.com/juspay/hyperswitch/commit/23e0c6354185d666771c07b8534e42380cc50812))
+
+### Refactors
+
+- **api_lock:** Allow api lock on psync only when force sync is true ([#3242](https://github.com/juspay/hyperswitch/pull/3242)) ([`ac5349c`](https://github.com/juspay/hyperswitch/commit/ac5349cd7160f67f7a56f48f54981cf3dc1e5b52))
+- **drainer:** Change logic for trimming the stream and refactor for modularity ([#3128](https://github.com/juspay/hyperswitch/pull/3128)) ([`de7a607`](https://github.com/juspay/hyperswitch/commit/de7a607e66847ff4bbddcbbafa50d54a56f02f62))
+- **euclid_wasm:** Update wasm config ([#3222](https://github.com/juspay/hyperswitch/pull/3222)) ([`7ea50c3`](https://github.com/juspay/hyperswitch/commit/7ea50c3a78bc1a091077c23999a69dda1cf0f463))
+- Address panics due to indexing and slicing ([#3233](https://github.com/juspay/hyperswitch/pull/3233)) ([`34318bc`](https://github.com/juspay/hyperswitch/commit/34318bc1f12a1298e8993021a2d516cf86049980))
+
+### Miscellaneous Tasks
+
+- Address Rust 1.75 clippy lints ([#3231](https://github.com/juspay/hyperswitch/pull/3231)) ([`c8279b1`](https://github.com/juspay/hyperswitch/commit/c8279b110e6c55784f042aebb956931e1870b0ca))
+
+**Full Changelog:** [`v1.106.1...2024.01.08.0`](https://github.com/juspay/hyperswitch/compare/v1.106.1...2024.01.08.0)
+
+- - -
+
+## 1.106.1 (2024-01-05)
+
+### Bug Fixes
+
+- **connector:** [iatapay] change refund amount ([#3244](https://github.com/juspay/hyperswitch/pull/3244)) ([`e79604b`](https://github.com/juspay/hyperswitch/commit/e79604bd4681a69802f3c3169dd94424e3688e42))
+
+**Full Changelog:** [`v1.106.0...v1.106.1`](https://github.com/juspay/hyperswitch/compare/v1.106.0...v1.106.1)
+
+- - -
+
+
+## 1.106.0 (2024-01-04)
+
+### Features
+
+- **connector:**
+ - [BOA] Populate merchant_defined_information with metadata ([#3208](https://github.com/juspay/hyperswitch/pull/3208)) ([`18eca7e`](https://github.com/juspay/hyperswitch/commit/18eca7e9fbe6cdc101bd135c4618882b7a5455bf))
+ - [CYBERSOURCE] Refactor cybersource ([#3215](https://github.com/juspay/hyperswitch/pull/3215)) ([`e06ba14`](https://github.com/juspay/hyperswitch/commit/e06ba148b666772fe79d7050d0c505dd2f04f87c))
+- **customers:** Add JWT Authentication for `/customers` APIs ([#3179](https://github.com/juspay/hyperswitch/pull/3179)) ([`aefe618`](https://github.com/juspay/hyperswitch/commit/aefe6184ec3e3156877c72988ca0f92454a47e7d))
+
+### Bug Fixes
+
+- **connector:** [Volt] Error handling for auth response ([#3187](https://github.com/juspay/hyperswitch/pull/3187)) ([`a51c54d`](https://github.com/juspay/hyperswitch/commit/a51c54d39d3687c6a06176895435ac66fa194d7b))
+- **core:** Fix recurring mandates flow for cyber source ([#3224](https://github.com/juspay/hyperswitch/pull/3224)) ([`6a1743e`](https://github.com/juspay/hyperswitch/commit/6a1743ebe993d5abb53f2ce1b8b383aa4a9553fb))
+- **middleware:** Add support for logging request-id sent in request ([#3225](https://github.com/juspay/hyperswitch/pull/3225)) ([`0f72b55`](https://github.com/juspay/hyperswitch/commit/0f72b5527aab221b8e69e737e5d19abdd0696150))
+
+### Refactors
+
+- **connector:** [NMI] Include mandatory fields for card 3DS ([#3203](https://github.com/juspay/hyperswitch/pull/3203)) ([`a46b8a7`](https://github.com/juspay/hyperswitch/commit/a46b8a7b05367fbbdbf4fca89d8a6b29110a4e1c))
+
+### Testing
+
+- **postman:** Update postman collection files ([`0248d35`](https://github.com/juspay/hyperswitch/commit/0248d35dd49d2dc7e5e4da6b60a3ee3577c8eac9))
+
+### Miscellaneous Tasks
+
+- Fix channel handling for consumer workflow loop ([#3223](https://github.com/juspay/hyperswitch/pull/3223)) ([`51e1fac`](https://github.com/juspay/hyperswitch/commit/51e1fac556fdd8775e0bbc858b0b3cc50a7e88ec))
+
+**Full Changelog:** [`v1.105.0...v1.106.0`](https://github.com/juspay/hyperswitch/compare/v1.105.0...v1.106.0)
+
+- - -
+
+
+## 1.105.0 (2023-12-23)
+
+### Features
+
+- **connector:** [BOA/CYBERSOURCE] Populate connector_transaction_id ([#3202](https://github.com/juspay/hyperswitch/pull/3202)) ([`110d3d2`](https://github.com/juspay/hyperswitch/commit/110d3d211be2edf47533cc5297ae159cad0e5034))
+
+**Full Changelog:** [`v1.104.0...v1.105.0`](https://github.com/juspay/hyperswitch/compare/v1.104.0...v1.105.0)
+
+- - -
+
+
+## 1.104.0 (2023-12-22)
+
+### Features
+
+- **connector:** [BOA] Implement apple pay manual flow ([#3191](https://github.com/juspay/hyperswitch/pull/3191)) ([`25fd3d5`](https://github.com/juspay/hyperswitch/commit/25fd3d502e48f10dd3acbdc88caea4007310d4ee))
+- **router:** Make the billing country for apple pay as optional field ([#3188](https://github.com/juspay/hyperswitch/pull/3188)) ([`15987cc`](https://github.com/juspay/hyperswitch/commit/15987cc81ecba3c1d0de4fa0a12424066a8842eb))
+
+### Bug Fixes
+
+- **connector:**
+ - [Trustpay] Use `connector_request_reference_id` for merchant reference instead of `payment_id` ([#2885](https://github.com/juspay/hyperswitch/pull/2885)) ([`c51c761`](https://github.com/juspay/hyperswitch/commit/c51c761677e8c5ff80de40f8796f340cf1331f96))
+ - [BOA/Cyb] Truncate state length to <20 ([#3198](https://github.com/juspay/hyperswitch/pull/3198)) ([`79a18e2`](https://github.com/juspay/hyperswitch/commit/79a18e2bf7bb1f338cf982fb1a152add2ed4e087))
+ - [Iatapay] fix error response handling when payment is failed ([#3197](https://github.com/juspay/hyperswitch/pull/3197)) ([`716a74c`](https://github.com/juspay/hyperswitch/commit/716a74cf8449583541c426a5c427c9e32f5b2528))
+ - [BOA] Display 2XX Failure Errors ([#3200](https://github.com/juspay/hyperswitch/pull/3200)) ([`07fd9be`](https://github.com/juspay/hyperswitch/commit/07fd9bedf02a1d70fc248fbbab480a5e24a7f077))
+ - [CYBERSOURCE] Display 2XX Failure Errors ([#3201](https://github.com/juspay/hyperswitch/pull/3201)) ([`86c2622`](https://github.com/juspay/hyperswitch/commit/86c26221357e14b585f44c6ebe46962c085f6552))
+- **users:** Wrong `user_role` insertion in `invite_user` for new users ([#3193](https://github.com/juspay/hyperswitch/pull/3193)) ([`b06a8d6`](https://github.com/juspay/hyperswitch/commit/b06a8d6e0d7fc4fb1bec30f702d64f0bd5e1068e))
+
+**Full Changelog:** [`v1.103.1...v1.104.0`](https://github.com/juspay/hyperswitch/compare/v1.103.1...v1.104.0)
+
+- - -
+
+
+## 1.103.1 (2023-12-21)
+
+### Bug Fixes
+
+- **connector:**
+ - Remove set_body method for connectors implementing default get_request_body ([#3182](https://github.com/juspay/hyperswitch/pull/3182)) ([`a5e141b`](https://github.com/juspay/hyperswitch/commit/a5e141b542622e7065f0e0070a3cddacde78fd8a))
+ - [Paypal] remove shipping address as mandatory field for paypal wallet ([#3181](https://github.com/juspay/hyperswitch/pull/3181)) ([`680ed60`](https://github.com/juspay/hyperswitch/commit/680ed603c5113ec29fbd13c4c633e18ad4ad10ee))
+
+**Full Changelog:** [`v1.103.0...v1.103.1`](https://github.com/juspay/hyperswitch/compare/v1.103.0...v1.103.1)
+
+- - -
+
+
+## 1.103.0 (2023-12-20)
+
+### Features
+
+- **connector:**
+ - [NMI] Implement webhook for Payments and Refunds ([#3164](https://github.com/juspay/hyperswitch/pull/3164)) ([`30c1401`](https://github.com/juspay/hyperswitch/commit/30c14019d067ad5f105563f205eb1941010233e8))
+ - [BOA] Handle BOA 5XX errors ([#3178](https://github.com/juspay/hyperswitch/pull/3178)) ([`1d80949`](https://github.com/juspay/hyperswitch/commit/1d80949bef1228bf432dc445eaba15afccb030bd))
+- **connector-config:** Add wasm support for dashboard connector configuration ([#3138](https://github.com/juspay/hyperswitch/pull/3138)) ([`b0ffbe9`](https://github.com/juspay/hyperswitch/commit/b0ffbe9355b7e38226994c1ccbbe80cdbc77adde))
+- **db:** Implement `AuthorizationInterface` for `MockDb` ([#3151](https://github.com/juspay/hyperswitch/pull/3151)) ([`396a64f`](https://github.com/juspay/hyperswitch/commit/396a64f3bbad6e75d4b263286a7ef6a2f09b180e))
+- **postman:** [Prophetpay] Add test cases ([#2946](https://github.com/juspay/hyperswitch/pull/2946)) ([`583d7b8`](https://github.com/juspay/hyperswitch/commit/583d7b87a711102e4e62417f3191ac837886eca9))
+
+### Bug Fixes
+
+- **connector:**
+ - [NMI] Fix response deserialization for vault id creation ([#3166](https://github.com/juspay/hyperswitch/pull/3166)) ([`d44daaf`](https://github.com/juspay/hyperswitch/commit/d44daaf539021a9cbc33c9391172c38825d74dcd))
+ - Connector wise validation for zero auth flow ([#3159](https://github.com/juspay/hyperswitch/pull/3159)) ([`45ba128`](https://github.com/juspay/hyperswitch/commit/45ba128b6ab39f513dd114567d9915acf0eaea20))
+- **events:** Add logger for incoming webhook payload ([#3171](https://github.com/juspay/hyperswitch/pull/3171)) ([`cf47a65`](https://github.com/juspay/hyperswitch/commit/cf47a65916fd4fb5c996946ffd579fd6755d02f7))
+- **users:** Send correct `user_role` values in `switch_merchant` response ([#3167](https://github.com/juspay/hyperswitch/pull/3167)) ([`dc589d5`](https://github.com/juspay/hyperswitch/commit/dc589d580f1382874bc755d3719bd3244fdedc67))
+
+### Refactors
+
+- **core:** Fix payment status for 4xx ([#3177](https://github.com/juspay/hyperswitch/pull/3177)) ([`e7949c2`](https://github.com/juspay/hyperswitch/commit/e7949c23b9be56a4cd763d4990c1a95c0fefae95))
+- **payment_methods:** Make the card_holder_name as an empty string if not sent ([#3173](https://github.com/juspay/hyperswitch/pull/3173)) ([`b98e53d`](https://github.com/juspay/hyperswitch/commit/b98e53d5cba5a5af04ada9bd83fa7bd2e27462d9))
+
+### Testing
+
+- **postman:** Update postman collection files ([`6890e90`](https://github.com/juspay/hyperswitch/commit/6890e9029d90bfd518ba23979a0bd507853dc983))
+
+### Documentation
+
+- **connector:** Update connector integration documentation ([#3041](https://github.com/juspay/hyperswitch/pull/3041)) ([`ce5514e`](https://github.com/juspay/hyperswitch/commit/ce5514eadfce240bc4cefb472405f37432a8507b))
+
+**Full Changelog:** [`v1.102.1...v1.103.0`](https://github.com/juspay/hyperswitch/compare/v1.102.1...v1.103.0)
+
+- - -
+
+
+## 1.102.1 (2023-12-18)
+
+### Bug Fixes
+
+- **connector:** [BOA/CYBERSOURCE] Update error handling ([#3156](https://github.com/juspay/hyperswitch/pull/3156)) ([`8e484dd`](https://github.com/juspay/hyperswitch/commit/8e484ddab8d3f4463299c7f7e8ce75b8dd628599))
+- **euclid_wasm:** Add function to retrieve keys for 3ds and surcharge decision manager ([#3160](https://github.com/juspay/hyperswitch/pull/3160)) ([`30fe9d1`](https://github.com/juspay/hyperswitch/commit/30fe9d19e4955035a370f8f9ce37963cdb76c68a))
+- **payment_link:** Added amount conversion to base unit based on currency ([#3162](https://github.com/juspay/hyperswitch/pull/3162)) ([`0fa61a9`](https://github.com/juspay/hyperswitch/commit/0fa61a9dd194c5b3688f8f68b056c263d92327d0))
+- Change prodintent name in dashboard metadata ([#3161](https://github.com/juspay/hyperswitch/pull/3161)) ([`8db3361`](https://github.com/juspay/hyperswitch/commit/8db3361d80f674a28a3916830a4b0c1c2b89776a))
+
+### Refactors
+
+- **connector:**
+ - [Helcim] change error message from not supported to not implemented ([#2850](https://github.com/juspay/hyperswitch/pull/2850)) ([`41b5a82`](https://github.com/juspay/hyperswitch/commit/41b5a82bafa9b0392bb43ed268fefc5187b48636))
+ - [Forte] change error message from not supported to not implemented ([#2847](https://github.com/juspay/hyperswitch/pull/2847)) ([`3fc0e2d`](https://github.com/juspay/hyperswitch/commit/3fc0e2d8195948d50f735df5192ae0f8431b432b))
+ - [Cryptopay] change error message from not supported to not implemented ([#2846](https://github.com/juspay/hyperswitch/pull/2846)) ([`2d895be`](https://github.com/juspay/hyperswitch/commit/2d895be9856d17cd923665568aa9b6e54fc1a305))
+- **router:** [ACI] change payment error message from not supported to not implemented error ([#2837](https://github.com/juspay/hyperswitch/pull/2837)) ([`cc12e8a`](https://github.com/juspay/hyperswitch/commit/cc12e8a2435e5e47eeec77c620c747b156a3e16b))
+- **users:** Rename `user_roles` and `dashboard_metadata` columns ([#3135](https://github.com/juspay/hyperswitch/pull/3135)) ([`e3589e6`](https://github.com/juspay/hyperswitch/commit/e3589e641c8a0b3b690b82f09a61d512db2d9932))
+
+**Full Changelog:** [`v1.102.0+hotfix.1...v1.102.1`](https://github.com/juspay/hyperswitch/compare/v1.102.0+hotfix.1...v1.102.1)
+
+- - -
+
+
+## 1.102.0 (2023-12-17)
+
+### Features
+
+- **connector:**
+ - [CYBERSOURCE] Implement Google Pay ([#3139](https://github.com/juspay/hyperswitch/pull/3139)) ([`4ae6af4`](https://github.com/juspay/hyperswitch/commit/4ae6af4632bbef5d21c3cb28538dcc4a94a10789))
+ - [PlaceToPay] Implement Cards for PlaceToPay ([#3117](https://github.com/juspay/hyperswitch/pull/3117)) ([`107c66f`](https://github.com/juspay/hyperswitch/commit/107c66fec331376aa8c9f1e710e1503793fde119))
+ - [CYBERSOURCE] Implement Apple Pay ([#3149](https://github.com/juspay/hyperswitch/pull/3149)) ([`5f53d84`](https://github.com/juspay/hyperswitch/commit/5f53d84a8b92f8aab67d09666b45362b287809ff))
+ - [NMI] Implement 3DS for Cards ([#3143](https://github.com/juspay/hyperswitch/pull/3143)) ([`7df4523`](https://github.com/juspay/hyperswitch/commit/7df45235b1b55c3e4f1205169fb512d2aadc98ac))
+
+### Bug Fixes
+
+- **connector:**
+ - [Checkout] Fix status mapping for checkout ([#3073](https://github.com/juspay/hyperswitch/pull/3073)) ([`5b2c329`](https://github.com/juspay/hyperswitch/commit/5b2c3291d4fbe3c4154c187b4e915dc3365e761a))
+ - [Cybersource] signature authentication in incremental_authorization flow ([#3141](https://github.com/juspay/hyperswitch/pull/3141)) ([`d47a7cc`](https://github.com/juspay/hyperswitch/commit/d47a7cc418b0f4bb609d99f4a463a14c39df46e4))
+- [CYBERSOURCE] Fix Status Mapping ([#3144](https://github.com/juspay/hyperswitch/pull/3144)) ([`62c0c47`](https://github.com/juspay/hyperswitch/commit/62c0c47e99f154399687a32caf9999b365da60ae))
+
+### Testing
+
+- **postman:** Update postman collection files ([`d40de4c`](https://github.com/juspay/hyperswitch/commit/d40de4c8b51010a9e6a3164196702a20c2ab3563))
+
+### Miscellaneous Tasks
+
+- **deps:** Bump zerocopy from 0.7.26 to 0.7.31 ([#3136](https://github.com/juspay/hyperswitch/pull/3136)) ([`d8de3c2`](https://github.com/juspay/hyperswitch/commit/d8de3c285c90103da93f0f3fd0241924dabd256f))
+- **events:** Remove duplicate logs ([#3148](https://github.com/juspay/hyperswitch/pull/3148)) ([`a78fed7`](https://github.com/juspay/hyperswitch/commit/a78fed73babace05b4f668ef219909277045ba85))
+
+**Full Changelog:** [`v1.101.0...v1.102.0`](https://github.com/juspay/hyperswitch/compare/v1.101.0...v1.102.0)
+
+- - -
+
+
+## 1.101.0 (2023-12-14)
+
+### Features
+
+- **payments:** Add outgoing payments webhooks ([#3133](https://github.com/juspay/hyperswitch/pull/3133)) ([`f457846`](https://github.com/juspay/hyperswitch/commit/f4578463d5e1a0f442aacebdfa7af0460489ba8c))
+
+### Bug Fixes
+
+- **connector:** [CashToCode] Fix cashtocode redirection for evoucher pm type ([#3131](https://github.com/juspay/hyperswitch/pull/3131)) ([`71a86a8`](https://github.com/juspay/hyperswitch/commit/71a86a804e15e4d053f92cfddb36a15cf7b77f7a))
+- **locker:** Fix double serialization for json request ([#3134](https://github.com/juspay/hyperswitch/pull/3134)) ([`70b86b7`](https://github.com/juspay/hyperswitch/commit/70b86b71e4809d2a47c6bc1214f72c37d3325c37))
+- **router:** Add routing cache invalidation on payment connector update ([#3132](https://github.com/juspay/hyperswitch/pull/3132)) ([`1f84865`](https://github.com/juspay/hyperswitch/commit/1f848659f135542fdfa967b3b48ad6cdf69fda2c))
+
+**Full Changelog:** [`v1.100.0...v1.101.0`](https://github.com/juspay/hyperswitch/compare/v1.100.0...v1.101.0)
+
+- - -
+
+
+## 1.100.0 (2023-12-14)
+
+### Features
+
+- **connector:**
+ - [RISKIFIED] Add support for riskified frm connector ([#2533](https://github.com/juspay/hyperswitch/pull/2533)) ([`151a30f`](https://github.com/juspay/hyperswitch/commit/151a30f4eed10924cd93bf7f4f66976af0ab8314))
+ - [HELCIM] Add connector_request_reference_id in invoice_number ([#3087](https://github.com/juspay/hyperswitch/pull/3087)) ([`3cc9642`](https://github.com/juspay/hyperswitch/commit/3cc9642f3ac4c07fb675e9ff4032832819d877a1))
+- **core:** Enable surcharge support for all connectors ([#3109](https://github.com/juspay/hyperswitch/pull/3109)) ([`57e1ae9`](https://github.com/juspay/hyperswitch/commit/57e1ae9dea6ff70fb1bca47c479c35026c167bad))
+- **events:** Add type info to outgoing requests & maintain structural & PII type info ([#2956](https://github.com/juspay/hyperswitch/pull/2956)) ([`6e82b0b`](https://github.com/juspay/hyperswitch/commit/6e82b0bd746b405281f79b86a3cd92b550a33f68))
+- **external_services:** Adds encrypt function for KMS ([#3111](https://github.com/juspay/hyperswitch/pull/3111)) ([`bca7cdb`](https://github.com/juspay/hyperswitch/commit/bca7cdb4c14b5fbb40d8cbf59fd1756ad27ac674))
+
+### Bug Fixes
+
+- **api_locking:** Fix the unit interpretation for `LockSettings` expiry ([#3121](https://github.com/juspay/hyperswitch/pull/3121)) ([`3f4167d`](https://github.com/juspay/hyperswitch/commit/3f4167dbd477c793e1a4cc572da0c12d66f2b649))
+- **connector:** [trustpay] make paymentId optional field ([#3101](https://github.com/juspay/hyperswitch/pull/3101)) ([`62a7c30`](https://github.com/juspay/hyperswitch/commit/62a7c3053c5e276091f5bd54a5679caef58a4ace))
+- **docker-compose:** Remove label list from docker compose yml ([#3118](https://github.com/juspay/hyperswitch/pull/3118)) ([`e1e23fd`](https://github.com/juspay/hyperswitch/commit/e1e23fd987cae96e56311d1cfdcb225d9327860c))
+- Validate refund amount with amount_captured instead of amount ([#3120](https://github.com/juspay/hyperswitch/pull/3120)) ([`be13d15`](https://github.com/juspay/hyperswitch/commit/be13d15d3c0214c863e131cf1dbe184d5baec5d7))
+
+### Refactors
+
+- **connector:** [Wise] Error Message For Connector Implementation ([#2952](https://github.com/juspay/hyperswitch/pull/2952)) ([`1add2c0`](https://github.com/juspay/hyperswitch/commit/1add2c059f4fb5653f33e2f3ce454793caf2d595))
+- **payments:** Add support for receiving card_holder_name field as an empty string ([#3127](https://github.com/juspay/hyperswitch/pull/3127)) ([`4d19d8b`](https://github.com/juspay/hyperswitch/commit/4d19d8b1d18f49f02e951c5025d35cf5d62cec1b))
+
+### Testing
+
+- **postman:** Update postman collection files ([`a5618cd`](https://github.com/juspay/hyperswitch/commit/a5618cd5d6eb5b007f7927f05e777e875195a678))
+
+**Full Changelog:** [`v1.99.0...v1.100.0`](https://github.com/juspay/hyperswitch/compare/v1.99.0...v1.100.0)
+
+- - -
+
+
+## 1.99.0 (2023-12-12)
+
+### Features
+
+- **connector:** [Placetopay] Add Connector Template Code ([#3084](https://github.com/juspay/hyperswitch/pull/3084)) ([`a7b688a`](https://github.com/juspay/hyperswitch/commit/a7b688aac72e15f782046b9d108aca12f43a9994))
+- Add utility to convert TOML configuration file to list of environment variables ([#3096](https://github.com/juspay/hyperswitch/pull/3096)) ([`2c4599a`](https://github.com/juspay/hyperswitch/commit/2c4599a1cd7e244b6fb11948c88c55c5b8faad76))
+
+### Bug Fixes
+
+- **router:** Make `request_incremental_authorization` optional in payment_intent ([#3086](https://github.com/juspay/hyperswitch/pull/3086)) ([`f7da59d`](https://github.com/juspay/hyperswitch/commit/f7da59d06af11707e210b58a875c013d31c3ee17))
+
+### Refactors
+
+- **email:** Create client every time of sending email ([#3105](https://github.com/juspay/hyperswitch/pull/3105)) ([`fc2f163`](https://github.com/juspay/hyperswitch/commit/fc2f16392148cd66b3c3e67e3e0c782910e37e1f))
+
+### Testing
+
+- **postman:** Update postman collection files ([`aa97821`](https://github.com/juspay/hyperswitch/commit/aa9782164fb7846fe533c5057a17756dc82ede54))
+
+### Miscellaneous Tasks
+
+- **deps:** Update fred and moka ([#3088](https://github.com/juspay/hyperswitch/pull/3088)) ([`129b1e5`](https://github.com/juspay/hyperswitch/commit/129b1e55bd1cbad0243030fd25379f1400eb170c))
+
+**Full Changelog:** [`v1.98.0...v1.99.0`](https://github.com/juspay/hyperswitch/compare/v1.98.0...v1.99.0)
+
+- - -
+
+
+## 1.98.0 (2023-12-11)
+
+### Features
+
+- **connector:** Accept connector_transaction_id in error_response of connector flows for Trustpay ([#3060](https://github.com/juspay/hyperswitch/pull/3060)) ([`f53b090`](https://github.com/juspay/hyperswitch/commit/f53b090db87e094f9694481f13af62240c4c422a))
+- **pm_auth:** Pm_auth service migration ([#3047](https://github.com/juspay/hyperswitch/pull/3047)) ([`9c1c44a`](https://github.com/juspay/hyperswitch/commit/9c1c44a706750b14857e9180f5161b61ed89a2ad))
+- **user:** Add `verify_email` API ([#3076](https://github.com/juspay/hyperswitch/pull/3076)) ([`585e009`](https://github.com/juspay/hyperswitch/commit/585e00980c43797f326efb809df9ffd497d1dd26))
+- **users:** Add resend verification email API ([#3093](https://github.com/juspay/hyperswitch/pull/3093)) ([`6d5c25e`](https://github.com/juspay/hyperswitch/commit/6d5c25e3369117acaf5865965769649d524226af))
+
+### Bug Fixes
+
+- **analytics:** Adding api_path to api logs event and to auditlogs api response ([#3079](https://github.com/juspay/hyperswitch/pull/3079)) ([`bf67438`](https://github.com/juspay/hyperswitch/commit/bf674380d5c7e856d0bae75554326aa9017c0201))
+- **config:** Add missing config fields in `docker_compose.toml` ([#3080](https://github.com/juspay/hyperswitch/pull/3080)) ([`1f8116d`](https://github.com/juspay/hyperswitch/commit/1f8116db368aec344d08603045c4cb46c2c25b41))
+- **connector:** [CYBERSOURCE] Remove Phone Number Field From Address ([#3095](https://github.com/juspay/hyperswitch/pull/3095)) ([`72955ec`](https://github.com/juspay/hyperswitch/commit/72955ecc68280773b9c77b4db3d46de95a62f9ed))
+- **drainer:** Properly log deserialization errors ([#3075](https://github.com/juspay/hyperswitch/pull/3075)) ([`42b5bd4`](https://github.com/juspay/hyperswitch/commit/42b5bd4f3d142c9fa12475f36a8b144753ac06e2))
+- **router:** Allow zero amount for payment intent in list payment methods ([#3090](https://github.com/juspay/hyperswitch/pull/3090)) ([`b283b6b`](https://github.com/juspay/hyperswitch/commit/b283b6b662c9f2eabe90473434369d8f7c2369a6))
+- **user:** Add checks for change password ([#3078](https://github.com/juspay/hyperswitch/pull/3078)) ([`26a2611`](https://github.com/juspay/hyperswitch/commit/26a261131b4dbb8570e139127a2c0d356e2820be))
+
+### Refactors
+
+- **payment_methods:** Make the card_holder_name optional for card details in the payment APIs ([#3074](https://github.com/juspay/hyperswitch/pull/3074)) ([`b279591`](https://github.com/juspay/hyperswitch/commit/b279591057cdba6004c99efc82bb856f0bacd1e0))
+- **user:** Add account verification check in signin ([#3082](https://github.com/juspay/hyperswitch/pull/3082)) ([`f7d6e3c`](https://github.com/juspay/hyperswitch/commit/f7d6e3c0149869175a59996e67d3e2d3b6f3b8c2))
+
+### Documentation
+
+- **openapi:** Fix `payment_methods_enabled` OpenAPI spec in merchant connector account APIs ([#3068](https://github.com/juspay/hyperswitch/pull/3068)) ([`b6838c4`](https://github.com/juspay/hyperswitch/commit/b6838c4d1a3a456e28a5f438fcd74a60bedb2539))
+
+### Miscellaneous Tasks
+
+- **configs:** [CYBERSOURCE] Add mandate configs ([#3085](https://github.com/juspay/hyperswitch/pull/3085)) ([`777cd5c`](https://github.com/juspay/hyperswitch/commit/777cd5cdc2342fb7195a06505647fa331725e1dd))
+
+**Full Changelog:** [`v1.97.0...v1.98.0`](https://github.com/juspay/hyperswitch/compare/v1.97.0...v1.98.0)
+
+- - -
+
+
+## 1.97.0 (2023-12-06)
+
+### Features
+
+- **Braintree:** Sync with Hyperswitch Reference ([#3037](https://github.com/juspay/hyperswitch/pull/3037)) ([`8a995ce`](https://github.com/juspay/hyperswitch/commit/8a995cefdf6806645383710c6f39d963da232e94))
+- **connector:** [BANKOFAMERICA] Implement Apple Pay ([#3061](https://github.com/juspay/hyperswitch/pull/3061)) ([`47c0383`](https://github.com/juspay/hyperswitch/commit/47c038300adad1c02e4c77d529c7cc2457cf3b91))
+- **metrics:** Add drainer delay metric ([#3034](https://github.com/juspay/hyperswitch/pull/3034)) ([`c6e2ee2`](https://github.com/juspay/hyperswitch/commit/c6e2ee29d9ee4fe54e6fa6f87c2fa065a290d258))
+
+### Bug Fixes
+
+- **config:** Parse kafka brokers from env variable as sequence ([#3066](https://github.com/juspay/hyperswitch/pull/3066)) ([`84decd8`](https://github.com/juspay/hyperswitch/commit/84decd8126d306a5e1cf22b36e1378a73dc963f5))
+- Throw bad request while pushing duplicate data to redis ([#3016](https://github.com/juspay/hyperswitch/pull/3016)) ([`a2405e5`](https://github.com/juspay/hyperswitch/commit/a2405e56fbd84936a1afa6aa9f8f7e815267fbec))
+- Return url none on complete authorize ([#3067](https://github.com/juspay/hyperswitch/pull/3067)) ([`6eec06b`](https://github.com/juspay/hyperswitch/commit/6eec06b1d6ee9a00b374905e0ab9e425d0e41095))
+
+### Miscellaneous Tasks
+
+- **codeowners:** Add codeowners for hyperswitch dashboard ([#3057](https://github.com/juspay/hyperswitch/pull/3057)) ([`cfafd5c`](https://github.com/juspay/hyperswitch/commit/cfafd5cd29857283d57731dda7c5a332a493f531))
+
+**Full Changelog:** [`v1.96.0...v1.97.0`](https://github.com/juspay/hyperswitch/compare/v1.96.0...v1.97.0)
+
+- - -
+
+
+## 1.96.0 (2023-12-05)
+
+### Features
+
+- **connector_onboarding:** Add Connector onboarding APIs ([#3050](https://github.com/juspay/hyperswitch/pull/3050)) ([`7bd6e05`](https://github.com/juspay/hyperswitch/commit/7bd6e05c0c05ebae9b82a6f410e61ca4409d088b))
+- **pm_list:** Add required fields for bancontact_card for Mollie, Adyen and Stripe ([#3035](https://github.com/juspay/hyperswitch/pull/3035)) ([`792e642`](https://github.com/juspay/hyperswitch/commit/792e642ad58f90bae3ddcea5e6cbc70e948d8e28))
+- **user:** Add email apis and new enums for metadata ([#3053](https://github.com/juspay/hyperswitch/pull/3053)) ([`1c3d260`](https://github.com/juspay/hyperswitch/commit/1c3d260dc3e18fbf6cbd5122122a6c73dceb39a3))
+- Implement FRM flows ([#2968](https://github.com/juspay/hyperswitch/pull/2968)) ([`055d838`](https://github.com/juspay/hyperswitch/commit/055d8383671f6b466297c177bcc770618c7da96a))
+
+### Bug Fixes
+
+- Remove redundant call to populate_payment_data function ([#3054](https://github.com/juspay/hyperswitch/pull/3054)) ([`53df543`](https://github.com/juspay/hyperswitch/commit/53df543b7f1407a758232025b7de0fb527be8e86))
+
+### Documentation
+
+- **test_utils:** Update postman docs ([#3055](https://github.com/juspay/hyperswitch/pull/3055)) ([`8b7a7aa`](https://github.com/juspay/hyperswitch/commit/8b7a7aa6494ff669e1f8bcc92a5160e422d6b26e))
+
+**Full Changelog:** [`v1.95.0...v1.96.0`](https://github.com/juspay/hyperswitch/compare/v1.95.0...v1.96.0)
+
+- - -
+
+
+## 1.95.0 (2023-12-05)
+
+### Features
+
+- **connector:** [BOA/CYBERSOURCE] Fix Status Mapping for Terminal St… ([#3031](https://github.com/juspay/hyperswitch/pull/3031)) ([`95876b0`](https://github.com/juspay/hyperswitch/commit/95876b0ce03e024edf77909502c53eb4e63a9855))
+- **pm_list:** Add required field for open_banking_uk for Adyen and Volt Connector ([#3032](https://github.com/juspay/hyperswitch/pull/3032)) ([`9d93533`](https://github.com/juspay/hyperswitch/commit/9d935332193dcc9f191a0a5a9e7405316794a418))
+- **router:**
+ - Add key_value to locker metrics ([#2995](https://github.com/juspay/hyperswitch/pull/2995)) ([`83fcd1a`](https://github.com/juspay/hyperswitch/commit/83fcd1a9deb106a44c8262923c7f1660b0c46bf2))
+ - Add payments incremental authorization api ([#3038](https://github.com/juspay/hyperswitch/pull/3038)) ([`a0cfdd3`](https://github.com/juspay/hyperswitch/commit/a0cfdd3fb12f04b603f65551eac985c31e08da85))
+- **types:** Add email types for sending emails ([#3020](https://github.com/juspay/hyperswitch/pull/3020)) ([`c4bd47e`](https://github.com/juspay/hyperswitch/commit/c4bd47eca93a158c9daeeeb18afb1e735eea8c94))
+- **user:**
+ - Generate and delete sample data ([#2987](https://github.com/juspay/hyperswitch/pull/2987)) ([`092ec73`](https://github.com/juspay/hyperswitch/commit/092ec73b3c65ce6048d379383b078d643f0f35fc))
+ - Add user_list and switch_list apis ([#3033](https://github.com/juspay/hyperswitch/pull/3033)) ([`ec15ddd`](https://github.com/juspay/hyperswitch/commit/ec15ddd0d0ed942fedec525406df3005d494b8d4))
+- Calculate surcharge for customer saved card list ([#3039](https://github.com/juspay/hyperswitch/pull/3039)) ([`daf0f09`](https://github.com/juspay/hyperswitch/commit/daf0f09f8e3293ee6a3599a25362d9171fc5b2e7))
+
+### Bug Fixes
+
+- **connector:** [Paypal] Parse response for Cards with no 3DS check ([#3021](https://github.com/juspay/hyperswitch/pull/3021)) ([`d883cd1`](https://github.com/juspay/hyperswitch/commit/d883cd18972c5f9e8350e9a3f4e5cd56ec2c0787))
+- **pm_list:** [Trustpay] Update dynamic fields for trustpay blik ([#3042](https://github.com/juspay/hyperswitch/pull/3042)) ([`9274cef`](https://github.com/juspay/hyperswitch/commit/9274cefbdd29d2ac64baeea2fe504dff2472cb47))
+- **wasm:** Fix wasm function to return the categories for keys with their description respectively ([#3023](https://github.com/juspay/hyperswitch/pull/3023)) ([`2ac5b2c`](https://github.com/juspay/hyperswitch/commit/2ac5b2cd764c0aad53ac7c672dfcc9132fa5668f))
+- Use card bin to get additional card details ([#3036](https://github.com/juspay/hyperswitch/pull/3036)) ([`6c7d3a2`](https://github.com/juspay/hyperswitch/commit/6c7d3a2e8a047ff23b52b76792fe8f28d3b952a4))
+- Transform connector name to lowercase in connector integration script ([#3048](https://github.com/juspay/hyperswitch/pull/3048)) ([`298e362`](https://github.com/juspay/hyperswitch/commit/298e3627c379de5acfcafb074036754661801f1e))
+- Add fallback to reverselookup error ([#3025](https://github.com/juspay/hyperswitch/pull/3025)) ([`ba392f5`](https://github.com/juspay/hyperswitch/commit/ba392f58b2956d67e93a08853bcf2270a869be27))
+
+### Refactors
+
+- **payment_methods:** Add support for passing card_cvc in payment_method_data object along with token ([#3024](https://github.com/juspay/hyperswitch/pull/3024)) ([`3ce04ab`](https://github.com/juspay/hyperswitch/commit/3ce04abae4eddfa27025368f5ef28987cccea43d))
+- **users:** Separate signup and signin ([#2921](https://github.com/juspay/hyperswitch/pull/2921)) ([`80efeb7`](https://github.com/juspay/hyperswitch/commit/80efeb76b1801529766978af1c06e2d2c7de66c0))
+- Create separate struct for surcharge details response ([#3027](https://github.com/juspay/hyperswitch/pull/3027)) ([`57591f8`](https://github.com/juspay/hyperswitch/commit/57591f819c7994099e76cff1affc7bcf3e45a031))
+
+### Testing
+
+- **postman:** Update postman collection files ([`6e09bc9`](https://github.com/juspay/hyperswitch/commit/6e09bc9e2c4bbe14dcb70da4a438850b03b3254c))
+
+**Full Changelog:** [`v1.94.0...v1.95.0`](https://github.com/juspay/hyperswitch/compare/v1.94.0...v1.95.0)
+
+- - -
+
+
+## 1.94.0 (2023-12-01)
+
+### Features
+
+- **user_role:** Add APIs for user roles ([#3013](https://github.com/juspay/hyperswitch/pull/3013)) ([`3fa0bdf`](https://github.com/juspay/hyperswitch/commit/3fa0bdf76558ec91df8d3beef3c36658cd138b37))
+
+### Bug Fixes
+
+- **config:** Add kms decryption support for sqlx password ([#3029](https://github.com/juspay/hyperswitch/pull/3029)) ([`b593467`](https://github.com/juspay/hyperswitch/commit/b5934674e518f991a8a575ad01b971dd086eeb40))
+
+### Refactors
+
+- **connector:**
+ - [Multisafe Pay] change error message from not supported to not implemented ([#2851](https://github.com/juspay/hyperswitch/pull/2851)) ([`668b943`](https://github.com/juspay/hyperswitch/commit/668b943403df2b3bb354dd093b8ec073a2618bda))
+ - [Shift4] change error message from NotSupported to NotImplemented ([#2880](https://github.com/juspay/hyperswitch/pull/2880)) ([`bc79d52`](https://github.com/juspay/hyperswitch/commit/bc79d522c30aa036378cf1e01354c422585cc226))
+
+**Full Changelog:** [`v1.93.0...v1.94.0`](https://github.com/juspay/hyperswitch/compare/v1.93.0...v1.94.0)
+
+- - -
+
+
+## 1.93.0 (2023-11-30)
+
+### Features
+
+- **connector:** [BANKOFAMERICA] Add Required Fields for GPAY ([#3014](https://github.com/juspay/hyperswitch/pull/3014)) ([`d30b58a`](https://github.com/juspay/hyperswitch/commit/d30b58abb5e716b70c2dadec9e6f13c9e3403b6f))
+- **core:** Add ability to verify connector credentials before integrating the connector ([#2986](https://github.com/juspay/hyperswitch/pull/2986)) ([`39f255b`](https://github.com/juspay/hyperswitch/commit/39f255b4b209588dec35d780078c2ab7ceb37b10))
+- **router:** Make core changes in payments flow to support incremental authorization ([#3009](https://github.com/juspay/hyperswitch/pull/3009)) ([`1ca2ba4`](https://github.com/juspay/hyperswitch/commit/1ca2ba459495ff9340954c87a6ae3e4dce0e7b71))
+- **user:** Add support for dashboard metadata ([#3000](https://github.com/juspay/hyperswitch/pull/3000)) ([`6a2e4ab`](https://github.com/juspay/hyperswitch/commit/6a2e4ab4169820f35e953a949bd2e82e7f098ed2))
+
+### Bug Fixes
+
+- **connector:**
+ - Move authorised status to charged in setup mandate ([#3017](https://github.com/juspay/hyperswitch/pull/3017)) ([`663754d`](https://github.com/juspay/hyperswitch/commit/663754d629d59a17ba9d4985fe04f9404ceb16b7))
+ - [Trustpay] Add mapping to error code `800.100.165` and `900.100.100` ([#2925](https://github.com/juspay/hyperswitch/pull/2925)) ([`8c37a8d`](https://github.com/juspay/hyperswitch/commit/8c37a8d857c5a58872fa2b2e194b85e755129677))
+- **core:** Error message on Refund update for `Not Implemented` Case ([#3011](https://github.com/juspay/hyperswitch/pull/3011)) ([`6b7ada1`](https://github.com/juspay/hyperswitch/commit/6b7ada1a34450ea3a7fc019375ba462a14ddd6ab))
+- **pm_list:** [Trustpay] Update Cards, Bank_redirect - blik pm type required field info for Trustpay ([#2999](https://github.com/juspay/hyperswitch/pull/2999)) ([`c05432c`](https://github.com/juspay/hyperswitch/commit/c05432c0bd70f222c2f898ce2cbb47a46364a490))
+- **router:**
+ - [Dlocal] connector transaction id fix ([#2872](https://github.com/juspay/hyperswitch/pull/2872)) ([`44b1f49`](https://github.com/juspay/hyperswitch/commit/44b1f4949ea06d59480670ccfa02446fa7713d13))
+ - Use default value for the routing algorithm column during business profile creation ([#2791](https://github.com/juspay/hyperswitch/pull/2791)) ([`b1fe76a`](https://github.com/juspay/hyperswitch/commit/b1fe76a82b4026d6eaa3baf4356378040880a458))
+- **routing:** Fix kgraph to exclude PM auth during construction ([#3019](https://github.com/juspay/hyperswitch/pull/3019)) ([`c6cb527`](https://github.com/juspay/hyperswitch/commit/c6cb527f07e23796c342f3562fbf3b61f1ef6801))
+
+### Refactors
+
+- **connector:**
+ - [Stax] change error message from NotSupported to NotImplemented ([#2879](https://github.com/juspay/hyperswitch/pull/2879)) ([`8a4dabc`](https://github.com/juspay/hyperswitch/commit/8a4dabc61df3e6012e50f785d93808ca3349be65))
+ - [Volt] change error message from NotSupported to NotImplemented ([#2878](https://github.com/juspay/hyperswitch/pull/2878)) ([`de8e31b`](https://github.com/juspay/hyperswitch/commit/de8e31b70d9b3c11e268cd1deffa71918dc4270d))
+ - [Adyen] Change country and issuer type to Optional for OpenBankingUk ([#2993](https://github.com/juspay/hyperswitch/pull/2993)) ([`ab3dac7`](https://github.com/juspay/hyperswitch/commit/ab3dac79b4f138cd1f60a9afc0635dcc137a4a05))
+- **postman:** Fix payme postman collection for handling `order_details` ([#2996](https://github.com/juspay/hyperswitch/pull/2996)) ([`1e60c71`](https://github.com/juspay/hyperswitch/commit/1e60c710985b341a118bb32962bd74b406d78f69))
+
+**Full Changelog:** [`v1.92.0...v1.93.0`](https://github.com/juspay/hyperswitch/compare/v1.92.0...v1.93.0)
+
+- - -
+
+
+## 1.92.0 (2023-11-29)
+
+### Features
+
+- **analytics:** Add Clickhouse based analytics ([#2988](https://github.com/juspay/hyperswitch/pull/2988)) ([`9df4e01`](https://github.com/juspay/hyperswitch/commit/9df4e0193ffeb6d1cc323bdebb7e2bdfb2a375e2))
+- **ses_email:** Add email services to hyperswitch ([#2977](https://github.com/juspay/hyperswitch/pull/2977)) ([`5f5e895`](https://github.com/juspay/hyperswitch/commit/5f5e895f638701a0e6ab3deea9101ef39033dd16))
+
+### Bug Fixes
+
+- **router:** Make use of warning to log errors when apple pay metadata parsing fails ([#3010](https://github.com/juspay/hyperswitch/pull/3010)) ([`2e57745`](https://github.com/juspay/hyperswitch/commit/2e57745352c547323ac2df2554f6bc2dbd6da37f))
+
+**Full Changelog:** [`v1.91.1...v1.92.0`](https://github.com/juspay/hyperswitch/compare/v1.91.1...v1.92.0)
+
+- - -
+
+
+## 1.91.1 (2023-11-29)
+
+### Bug Fixes
+
+- Remove `dummy_connector` from `default` features in `common_enums` ([#3005](https://github.com/juspay/hyperswitch/pull/3005)) ([`bb593ab`](https://github.com/juspay/hyperswitch/commit/bb593ab0cd1a30190b6c305f2432de83ac7fde93))
+- Remove error propagation if card name not found in locker in case of temporary token ([#3006](https://github.com/juspay/hyperswitch/pull/3006)) ([`5c32b37`](https://github.com/juspay/hyperswitch/commit/5c32b3739e2c5895fe7f5cf8cc92f917c2639eac))
+- Few fields were not getting updated in apply_changeset function ([#3002](https://github.com/juspay/hyperswitch/pull/3002)) ([`d289524`](https://github.com/juspay/hyperswitch/commit/d289524869f0c3835db9cf90d57ebedf560e0291))
+
+### Miscellaneous Tasks
+
+- **deps:** Bump openssl from 0.10.57 to 0.10.60 ([#3004](https://github.com/juspay/hyperswitch/pull/3004)) ([`1c2f35a`](https://github.com/juspay/hyperswitch/commit/1c2f35af92608fca5836448710eca9f9c23a776a))
+
+**Full Changelog:** [`v1.91.0...v1.91.1`](https://github.com/juspay/hyperswitch/compare/v1.91.0...v1.91.1)
+
+- - -
+
+
+## 1.91.0 (2023-11-28)
+
+### Features
+
+- **core:**
+ - [Paypal] Add Preprocessing flow to CompleteAuthorize for Card 3DS Auth Verification ([#2757](https://github.com/juspay/hyperswitch/pull/2757)) ([`77fc92c`](https://github.com/juspay/hyperswitch/commit/77fc92c99a99aaf76d270ba5b981928183a05768))
+ - Enable payment refund when payment is partially captured ([#2991](https://github.com/juspay/hyperswitch/pull/2991)) ([`837480d`](https://github.com/juspay/hyperswitch/commit/837480d935cce8cc35f07c5ccb3560285909bc52))
+- **currency_conversion:** Add currency conversion feature ([#2948](https://github.com/juspay/hyperswitch/pull/2948)) ([`c0116db`](https://github.com/juspay/hyperswitch/commit/c0116db271f6afc1b93c04705209bfc346228c68))
+- **payment_methods:** Receive `card_holder_name` in confirm flow when using token for payment ([#2982](https://github.com/juspay/hyperswitch/pull/2982)) ([`e7ad3a4`](https://github.com/juspay/hyperswitch/commit/e7ad3a4db8823f3ae8d381771739670d8350e6da))
+
+### Bug Fixes
+
+- **connector:** [Adyen] `ErrorHandling` in case of Balance Check for Gift Cards ([#1976](https://github.com/juspay/hyperswitch/pull/1976)) ([`bd889c8`](https://github.com/juspay/hyperswitch/commit/bd889c834dd5e201b055233016f7226fa2187aea))
+- **core:** Replace euclid enum with RoutableConnectors enum ([#2994](https://github.com/juspay/hyperswitch/pull/2994)) ([`ff6a0dd`](https://github.com/juspay/hyperswitch/commit/ff6a0dd0b515778b64a3e28ef905154eee85ec78))
+- Remove error propagation if card name not found in locker ([#2998](https://github.com/juspay/hyperswitch/pull/2998)) ([`1c5a9b5`](https://github.com/juspay/hyperswitch/commit/1c5a9b5452afc33b18f45389bf3bdfd80820f476))
+
+### Refactors
+
+- **events:** Adding changes to type of API events to Kafka ([#2992](https://github.com/juspay/hyperswitch/pull/2992)) ([`d63f6f7`](https://github.com/juspay/hyperswitch/commit/d63f6f7224f35018e7c707353508bbacc2baed5c))
+- **masking:** Use empty enums as masking::Strategy types ([#2874](https://github.com/juspay/hyperswitch/pull/2874)) ([`0e66b1b`](https://github.com/juspay/hyperswitch/commit/0e66b1b5dcce6dd87c9d743c9eb73d0cd8e330b2))
+- **router:** Add openapi spec support for merchant_connector apis ([#2997](https://github.com/juspay/hyperswitch/pull/2997)) ([`cdbb385`](https://github.com/juspay/hyperswitch/commit/cdbb3853cd44443f8487abc16a9ba5d99f22e475))
+- Added min idle and max lifetime for database config ([#2900](https://github.com/juspay/hyperswitch/pull/2900)) ([`b3c51e6`](https://github.com/juspay/hyperswitch/commit/b3c51e6eb55c58adc024ee32b59c3910b2b72131))
+
+### Testing
+
+- **postman:** Update postman collection files ([`af6b05c`](https://github.com/juspay/hyperswitch/commit/af6b05c504b6fdbec7db77fa7f71535d7fea3e7a))
+
+**Full Changelog:** [`v1.90.0...v1.91.0`](https://github.com/juspay/hyperswitch/compare/v1.90.0...v1.91.0)
+
+- - -
+
+
+## 1.90.0 (2023-11-27)
+
+### Features
+
+- **auth:** Add Authorization for JWT Authentication types ([#2973](https://github.com/juspay/hyperswitch/pull/2973)) ([`03c0a77`](https://github.com/juspay/hyperswitch/commit/03c0a772a99000acf4676db8ca2ce916036281d1))
+- **user:** Implement change password for user ([#2959](https://github.com/juspay/hyperswitch/pull/2959)) ([`bfa1645`](https://github.com/juspay/hyperswitch/commit/bfa1645b847fb881eb2370d5dbfef6fd0b53725d))
+
+### Bug Fixes
+
+- **router:** Added validation to check total orderDetails amount equal to amount in request ([#2965](https://github.com/juspay/hyperswitch/pull/2965)) ([`37532d4`](https://github.com/juspay/hyperswitch/commit/37532d46f599a99e0e021b0455a6f02381005dd7))
+- Add prefix to connector_transaction_id ([#2981](https://github.com/juspay/hyperswitch/pull/2981)) ([`107c3b9`](https://github.com/juspay/hyperswitch/commit/107c3b99417dd7bca7b62741ad601485700f37be))
+
+### Refactors
+
+- **connector:** [Nuvei] update error message ([#2867](https://github.com/juspay/hyperswitch/pull/2867)) ([`04b7c03`](https://github.com/juspay/hyperswitch/commit/04b7c0384dc9290bd60f49033fd35732527720f1))
+
+### Testing
+
+- **postman:** Update postman collection files ([`aee59e0`](https://github.com/juspay/hyperswitch/commit/aee59e088a8e7c1b81aca1015c90c7b4fd07511d))
+
+### Documentation
+
+- **try_local_system:** Add instructions to run using Docker Compose by pulling standalone images ([#2984](https://github.com/juspay/hyperswitch/pull/2984)) ([`0fa8ad1`](https://github.com/juspay/hyperswitch/commit/0fa8ad1b7c27010bf83e4035de9881d29e192e8a))
+
+### Miscellaneous Tasks
+
+- **connector:** Update connector addition script ([#2801](https://github.com/juspay/hyperswitch/pull/2801)) ([`34953a0`](https://github.com/juspay/hyperswitch/commit/34953a046429fe0341e8469bd9b036e176bda205))
+
+**Full Changelog:** [`v1.89.0...v1.90.0`](https://github.com/juspay/hyperswitch/compare/v1.89.0...v1.90.0)
+
+- - -
+
+
+## 1.89.0 (2023-11-24)
+
+### Features
+
+- **router:** Add `connector_transaction_id` in error_response from connector flows ([#2972](https://github.com/juspay/hyperswitch/pull/2972)) ([`3322103`](https://github.com/juspay/hyperswitch/commit/3322103f5c9b7c2a5b663980246c6ca36b8dc63e))
+
+### Bug Fixes
+
+- **connector:** [BANKOFAMERICA] Add status VOIDED in enum Bankofameri… ([#2969](https://github.com/juspay/hyperswitch/pull/2969)) ([`203bbd7`](https://github.com/juspay/hyperswitch/commit/203bbd73751e1513206e81d7cf920ec263f83c58))
+- **core:** Error propagation for not supporting partial refund ([#2976](https://github.com/juspay/hyperswitch/pull/2976)) ([`97a38a7`](https://github.com/juspay/hyperswitch/commit/97a38a78e514e4fa3b5db46b6de985be6312dcc3))
+- **router:** Mark refund status as failure for not_implemented error from connector flows ([#2978](https://github.com/juspay/hyperswitch/pull/2978)) ([`d56d805`](https://github.com/juspay/hyperswitch/commit/d56d80557050336d5ed37282f1aa34b6c17389d1))
+- Return none instead of err when payment method data is not found for bank debit during listing ([#2967](https://github.com/juspay/hyperswitch/pull/2967)) ([`5cc829a`](https://github.com/juspay/hyperswitch/commit/5cc829a11f515a413fe19f657a90aa05cebb99b5))
+- Surcharge related status and rules fix ([#2974](https://github.com/juspay/hyperswitch/pull/2974)) ([`3db7213`](https://github.com/juspay/hyperswitch/commit/3db721388a7f0e291d7eb186661fc69a57068ea6))
+
+### Documentation
+
+- **README:** Updated Community Platform Mentions ([#2960](https://github.com/juspay/hyperswitch/pull/2960)) ([`e0bde43`](https://github.com/juspay/hyperswitch/commit/e0bde433282a34eb9eb28a2d9c43c2b17b5e65e5))
+- Add Rust locker information in architecture doc ([#2964](https://github.com/juspay/hyperswitch/pull/2964)) ([`b2f7dd1`](https://github.com/juspay/hyperswitch/commit/b2f7dd13925a1429e316cd9eaf0e2d31d46b6d4a))
+
+**Full Changelog:** [`v1.88.0...v1.89.0`](https://github.com/juspay/hyperswitch/compare/v1.88.0...v1.89.0)
+
+- - -
+
+
+## 1.88.0 (2023-11-23)
+
+### Features
+
+- **connector:** [BANKOFAMERICA] Implement Google Pay ([#2940](https://github.com/juspay/hyperswitch/pull/2940)) ([`f91d4ae`](https://github.com/juspay/hyperswitch/commit/f91d4ae11b02def92c1dde743a0c01b5aac5703f))
+- **router:** Allow billing and shipping address update in payments confirm flow ([#2963](https://github.com/juspay/hyperswitch/pull/2963)) ([`59ef162`](https://github.com/juspay/hyperswitch/commit/59ef162219db3e4650dde65710850bc9f3280530))
+
+### Bug Fixes
+
+- **connector:** [Prophetpay] Use refund_id as reference_id for Refund ([#2966](https://github.com/juspay/hyperswitch/pull/2966)) ([`dd3e22a`](https://github.com/juspay/hyperswitch/commit/dd3e22a938714f373477e08d1d25e4b84ac796c6))
+- **core:** Fix Default Values Enum FieldType ([#2934](https://github.com/juspay/hyperswitch/pull/2934)) ([`35a44ed`](https://github.com/juspay/hyperswitch/commit/35a44ed2533b748e3fabb8a2f8db4fa7e5d3cf7e))
+- **drainer:** Increase jobs picked only when stream is not empty ([#2958](https://github.com/juspay/hyperswitch/pull/2958)) ([`42eedf3`](https://github.com/juspay/hyperswitch/commit/42eedf3a8c2e62fc22bcead370d129ebaf11a00b))
+- Amount_captured goes to 0 for 3ds payments ([#2954](https://github.com/juspay/hyperswitch/pull/2954)) ([`75eea7e`](https://github.com/juspay/hyperswitch/commit/75eea7e81787f2e0697b930b82a8188193f8d51f))
+- Make drainer sleep on every loop interval instead of cycle end ([#2951](https://github.com/juspay/hyperswitch/pull/2951)) ([`e8df690`](https://github.com/juspay/hyperswitch/commit/e8df69092f4c6acee58109aaff2a9454fceb571a))
+
+### Refactors
+
+- **connector:**
+ - [Payeezy] update error message ([#2919](https://github.com/juspay/hyperswitch/pull/2919)) ([`cb65370`](https://github.com/juspay/hyperswitch/commit/cb653706066b889eaa9423a6227ce1df954b4759))
+ - [Worldline] change error message from NotSupported to NotImplemented ([#2893](https://github.com/juspay/hyperswitch/pull/2893)) ([`e721b06`](https://github.com/juspay/hyperswitch/commit/e721b06c7077e00458450a4fb98f4497e8227dc6))
+
+### Testing
+
+- **postman:** Update postman collection files ([`9a3fa00`](https://github.com/juspay/hyperswitch/commit/9a3fa00426d74f6d18b3c712b292d98d80d517ba))
+
+**Full Changelog:** [`v1.87.0...v1.88.0`](https://github.com/juspay/hyperswitch/compare/v1.87.0...v1.88.0)
+
+- - -
+
+
+## 1.87.0 (2023-11-22)
+
+### Features
+
+- **api_event_errors:** Error field in APIEvents ([#2808](https://github.com/juspay/hyperswitch/pull/2808)) ([`ce10579`](https://github.com/juspay/hyperswitch/commit/ce10579a729fe4a7d4ab9f1a4cbd38c3ca00e90b))
+- **payment_methods:** Add support for tokenising bank details and fetching masked details while listing ([#2585](https://github.com/juspay/hyperswitch/pull/2585)) ([`9989489`](https://github.com/juspay/hyperswitch/commit/998948953ab8a444aca79957f48e7cfb3066c334))
+- **router:**
+ - Migrate `payment_method_data` to rust locker only if `payment_method` is card ([#2929](https://github.com/juspay/hyperswitch/pull/2929)) ([`f8261a9`](https://github.com/juspay/hyperswitch/commit/f8261a96e758498a32c988191bf314aa6c752059))
+ - Add list payment link support ([#2805](https://github.com/juspay/hyperswitch/pull/2805)) ([`b441a1f`](https://github.com/juspay/hyperswitch/commit/b441a1f2f9d9d84601cf78a6e39145e8fb847593))
+- **routing:** Routing prometheus metrics ([#2870](https://github.com/juspay/hyperswitch/pull/2870)) ([`4e15d77`](https://github.com/juspay/hyperswitch/commit/4e15d7792e3167de170c3d8310f33419f4dfb0db))
+
+### Bug Fixes
+
+- cybersource mandates and fiserv exp year ([#2920](https://github.com/juspay/hyperswitch/pull/2920)) ([`7f74ae9`](https://github.com/juspay/hyperswitch/commit/7f74ae98a1d48eed98341e4505d3801a61e69fc7))
+- Kv logs when KeyNotSet is returned ([#2928](https://github.com/juspay/hyperswitch/pull/2928)) ([`6954de7`](https://github.com/juspay/hyperswitch/commit/6954de77a0fda14d87b79ec7ceee7cc8f1c491db))
+
+### Refactors
+
+- **macros:** Use syn2.0 ([#2890](https://github.com/juspay/hyperswitch/pull/2890)) ([`46e13d5`](https://github.com/juspay/hyperswitch/commit/46e13d54759168ad7667af08d5481ab510e5706a))
+- **mca:** Add Serialization for `ConnectorAuthType` ([#2945](https://github.com/juspay/hyperswitch/pull/2945)) ([`341374b`](https://github.com/juspay/hyperswitch/commit/341374b8e5eced329587b93cbb6bd58e16dd9932))
+
+### Testing
+
+- **postman:** Update postman collection files ([`b96052f`](https://github.com/juspay/hyperswitch/commit/b96052f9c64dd6e49d52ba8befd1f60a843b482a))
+
+### Documentation
+
+- **README:** Update feature support link ([#2894](https://github.com/juspay/hyperswitch/pull/2894)) ([`7d223ee`](https://github.com/juspay/hyperswitch/commit/7d223ee0d1b53c02421ed6bd1b5584362d7a7456))
+
+### Miscellaneous Tasks
+
+- Address Rust 1.74 clippy lints ([#2942](https://github.com/juspay/hyperswitch/pull/2942)) ([`c6a5a85`](https://github.com/juspay/hyperswitch/commit/c6a5a8574825dc333602f4f1cee7e26969eab030))
+
+**Full Changelog:** [`v1.86.0...v1.87.0`](https://github.com/juspay/hyperswitch/compare/v1.86.0...v1.87.0)
+
+- - -
+
+
+## 1.86.0 (2023-11-21)
+
+### Features
+
+- **connector:** [Prophetpay] Save card token for Refund and remove Void flow ([#2927](https://github.com/juspay/hyperswitch/pull/2927)) ([`15a255e`](https://github.com/juspay/hyperswitch/commit/15a255ea60dffad9e4cf20d642636028c27c7c00))
+- Add support for 3ds and surcharge decision through routing rules ([#2869](https://github.com/juspay/hyperswitch/pull/2869)) ([`f8618e0`](https://github.com/juspay/hyperswitch/commit/f8618e077065d94aa27d7153fc5ea6f93870bd81))
+
+### Bug Fixes
+
+- **mca:** Change the check for `disabled` field in mca create and update ([#2938](https://github.com/juspay/hyperswitch/pull/2938)) ([`e66ccde`](https://github.com/juspay/hyperswitch/commit/e66ccde4cf6d055b7d02c5e982d2e09364845602))
+- Status goes from pending to partially captured in psync ([#2915](https://github.com/juspay/hyperswitch/pull/2915)) ([`3f3b797`](https://github.com/juspay/hyperswitch/commit/3f3b797dc65c1bc6f710b122ef00d5bcb409e600))
+
+### Testing
+
+- **postman:** Update postman collection files ([`245e489`](https://github.com/juspay/hyperswitch/commit/245e489d13209da19d6e9af01219056eec04e897))
+
+**Full Changelog:** [`v1.85.0...v1.86.0`](https://github.com/juspay/hyperswitch/compare/v1.85.0...v1.86.0)
+
+- - -
+
+
+## 1.85.0 (2023-11-21)
+
+### Features
+
+- **mca:** Add new `auth_type` and a status field for mca ([#2883](https://github.com/juspay/hyperswitch/pull/2883)) ([`25cef38`](https://github.com/juspay/hyperswitch/commit/25cef386b8876b43893f20b93cd68ece6e68412d))
+- **router:** Add unified_code, unified_message in payments response ([#2918](https://github.com/juspay/hyperswitch/pull/2918)) ([`3954001`](https://github.com/juspay/hyperswitch/commit/39540015fde476ad8492a9142c2c1bfda8444a27))
+
+### Bug Fixes
+
+- **connector:**
+ - [fiserv] fix metadata deserialization in merchant_connector_account ([#2746](https://github.com/juspay/hyperswitch/pull/2746)) ([`644709d`](https://github.com/juspay/hyperswitch/commit/644709d95f6ecaab497cf0cf3788b9e2ed88b855))
+ - [CASHTOCODE] Fix Error Response Handling ([#2926](https://github.com/juspay/hyperswitch/pull/2926)) ([`938b63a`](https://github.com/juspay/hyperswitch/commit/938b63a1fceb87b4aae4211dac4d051e024028b1))
+- **router:** Associate parent payment token with `payment_method_id` as hyperswitch token for saved cards ([#2130](https://github.com/juspay/hyperswitch/pull/2130)) ([`efeebc0`](https://github.com/juspay/hyperswitch/commit/efeebc0f2365f0900de3dd3e10a1539621c9933d))
+- Api lock on PaymentsCreate ([#2916](https://github.com/juspay/hyperswitch/pull/2916)) ([`cfabfa6`](https://github.com/juspay/hyperswitch/commit/cfabfa60db4d275066be72ee64153a34d38f13b8))
+- Merchant_connector_id null in KV flow ([#2810](https://github.com/juspay/hyperswitch/pull/2810)) ([`e566a4e`](https://github.com/juspay/hyperswitch/commit/e566a4eff2270c2a56ec90966f42ccfd79906068))
+
+### Refactors
+
+- **connector:** [Paypal] Add support for both BodyKey and SignatureKey ([#2633](https://github.com/juspay/hyperswitch/pull/2633)) ([`d8fcd3c`](https://github.com/juspay/hyperswitch/commit/d8fcd3c9712480c1230590c4f23b35da79df784d))
+- **core:** Query business profile only once ([#2830](https://github.com/juspay/hyperswitch/pull/2830)) ([`44deeb7`](https://github.com/juspay/hyperswitch/commit/44deeb7e7605cb5320b84c0fac1fd551877803a4))
+- **payment_methods:** Added support for pm_auth_connector field in pm list response ([#2667](https://github.com/juspay/hyperswitch/pull/2667)) ([`be4aa3b`](https://github.com/juspay/hyperswitch/commit/be4aa3b913819698c6c22ddedafe1d90fbe02add))
+- Add mapping for ConnectorError in payouts flow ([#2608](https://github.com/juspay/hyperswitch/pull/2608)) ([`5c4e7c9`](https://github.com/juspay/hyperswitch/commit/5c4e7c9031f62d63af35da2dcab79eac948e7dbb))
+
+### Testing
+
+- **postman:** Update postman collection files ([`ce725ef`](https://github.com/juspay/hyperswitch/commit/ce725ef8c680eea3fe03671c989fd4572cfc0640))
+
+**Full Changelog:** [`v1.84.0...v1.85.0`](https://github.com/juspay/hyperswitch/compare/v1.84.0...v1.85.0)
+
+- - -
+
+
+## 1.84.0 (2023-11-17)
+
+### Features
+
+- **connector:** [BANKOFAMERICA] PSYNC Bugfix ([#2897](https://github.com/juspay/hyperswitch/pull/2897)) ([`bdcc138`](https://github.com/juspay/hyperswitch/commit/bdcc138e8d84577fc99f9a9aef3484b66f98209a))
+
+**Full Changelog:** [`v1.83.1...v1.84.0`](https://github.com/juspay/hyperswitch/compare/v1.83.1...v1.84.0)
+
+- - -
+
+
+## 1.83.1 (2023-11-17)
+
+### Bug Fixes
+
+- **router:** Add choice to use the appropriate key for jws verification ([#2917](https://github.com/juspay/hyperswitch/pull/2917)) ([`606daa9`](https://github.com/juspay/hyperswitch/commit/606daa9367cac8c2ea926313019deab2f938b591))
+
+**Full Changelog:** [`v1.83.0...v1.83.1`](https://github.com/juspay/hyperswitch/compare/v1.83.0...v1.83.1)
+
+- - -
+
+
+## 1.83.0 (2023-11-17)
+
+### Features
+
+- **events:** Add incoming webhook payload to api events logger ([#2852](https://github.com/juspay/hyperswitch/pull/2852)) ([`aea390a`](https://github.com/juspay/hyperswitch/commit/aea390a6a1c331f8e0dbea4f41218e43f7323508))
+- **router:** Custom payment link config for payment create ([#2741](https://github.com/juspay/hyperswitch/pull/2741)) ([`c39beb2`](https://github.com/juspay/hyperswitch/commit/c39beb2501e63bbf7fd41bbc947280d7ff5a71dc))
+
+### Bug Fixes
+
+- **router:** Add rust locker url in proxy_bypass_urls ([#2902](https://github.com/juspay/hyperswitch/pull/2902)) ([`9a201ae`](https://github.com/juspay/hyperswitch/commit/9a201ae698c2cf52e617660f82d5bf1df2e797ae))
+
+### Documentation
+
+- **README:** Replace cloudformation deployment template with latest s3 url. ([#2891](https://github.com/juspay/hyperswitch/pull/2891)) ([`375108b`](https://github.com/juspay/hyperswitch/commit/375108b6df50e041fc9dbeb35a6a6b46b146037a))
+
+**Full Changelog:** [`v1.82.0...v1.83.0`](https://github.com/juspay/hyperswitch/compare/v1.82.0...v1.83.0)
+
+- - -
+
+
+## 1.82.0 (2023-11-17)
+
+### Features
+
+- **router:** Add fallback while add card and retrieve card from rust locker ([#2888](https://github.com/juspay/hyperswitch/pull/2888)) ([`f735fb0`](https://github.com/juspay/hyperswitch/commit/f735fb0551812fd781a2db8bac5a0deef4cabb2b))
+
+### Bug Fixes
+
+- **core:** Introduce new attempt and intent status to handle multiple partial captures ([#2802](https://github.com/juspay/hyperswitch/pull/2802)) ([`cb88be0`](https://github.com/juspay/hyperswitch/commit/cb88be01f22725948648976c2a5606a03b5ce92a))
+
+### Testing
+
+- **postman:** Update postman collection files ([`7d05b74`](https://github.com/juspay/hyperswitch/commit/7d05b74b950d9e078b063e17d046cbeb501d006a))
+
+**Full Changelog:** [`v1.81.0...v1.82.0`](https://github.com/juspay/hyperswitch/compare/v1.81.0...v1.82.0)
+
+- - -
+
+
## 1.81.0 (2023-11-16)
### Features
diff --git a/Cargo.lock b/Cargo.lock
index a03340093c88..f0334ce9cfc1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -14,7 +14,7 @@ dependencies = [
"futures-sink",
"memchr",
"pin-project-lite",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-util",
"tracing",
]
@@ -44,8 +44,8 @@ dependencies = [
"actix-rt",
"actix-service",
"actix-utils",
- "ahash 0.8.3",
- "base64 0.21.4",
+ "ahash 0.8.6",
+ "base64 0.21.5",
"bitflags 1.3.2",
"brotli",
"bytes 1.5.0",
@@ -67,7 +67,7 @@ dependencies = [
"rand 0.8.5",
"sha1",
"smallvec 1.11.1",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-util",
"tracing",
"zstd",
@@ -80,7 +80,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
dependencies = [
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -105,7 +105,7 @@ dependencies = [
"serde_json",
"serde_plain",
"tempfile",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -118,7 +118,7 @@ dependencies = [
"parse-size",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -142,7 +142,7 @@ checksum = "28f32d40287d3f402ae0028a9d54bef51af15c8769492826a69d28f81893151d"
dependencies = [
"actix-macros",
"futures-core",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -156,9 +156,9 @@ dependencies = [
"actix-utils",
"futures-core",
"futures-util",
- "mio 0.8.8",
- "socket2 0.5.4",
- "tokio 1.32.0",
+ "mio 0.8.10",
+ "socket2 0.5.5",
+ "tokio 1.35.1",
"tracing",
]
@@ -188,11 +188,11 @@ dependencies = [
"pin-project-lite",
"rustls 0.21.7",
"rustls-webpki",
- "tokio 1.32.0",
- "tokio-rustls",
+ "tokio 1.35.1",
+ "tokio-rustls 0.23.4",
"tokio-util",
"tracing",
- "webpki-roots",
+ "webpki-roots 0.22.6",
]
[[package]]
@@ -220,7 +220,7 @@ dependencies = [
"actix-service",
"actix-utils",
"actix-web-codegen",
- "ahash 0.7.6",
+ "ahash 0.7.7",
"bytes 1.5.0",
"bytestring",
"cfg-if 1.0.0",
@@ -255,7 +255,7 @@ dependencies = [
"actix-router",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -281,25 +281,26 @@ checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234"
[[package]]
name = "ahash"
-version = "0.7.6"
+version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
+checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd"
dependencies = [
- "getrandom 0.2.10",
+ "getrandom 0.2.11",
"once_cell",
"version_check",
]
[[package]]
name = "ahash"
-version = "0.8.3"
+version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
+checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
dependencies = [
"cfg-if 1.0.0",
- "getrandom 0.2.10",
+ "getrandom 0.2.11",
"once_cell",
"version_check",
+ "zerocopy",
]
[[package]]
@@ -332,6 +333,36 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+[[package]]
+name = "analytics"
+version = "0.1.0"
+dependencies = [
+ "actix-web",
+ "api_models",
+ "async-trait",
+ "aws-config",
+ "aws-sdk-lambda",
+ "aws-smithy-types",
+ "bigdecimal",
+ "common_utils",
+ "diesel_models",
+ "error-stack",
+ "external_services",
+ "futures 0.3.28",
+ "masking",
+ "once_cell",
+ "reqwest",
+ "router_env",
+ "serde",
+ "serde_json",
+ "sqlx",
+ "storage_impl",
+ "strum 0.25.0",
+ "thiserror",
+ "time",
+ "tokio 1.35.1",
+]
+
[[package]]
name = "android-tzdata"
version = "0.1.1"
@@ -375,13 +406,15 @@ dependencies = [
"common_utils",
"error-stack",
"euclid",
+ "frunk",
+ "frunk_core",
"masking",
"mime",
"reqwest",
"router_derive",
"serde",
"serde_json",
- "strum 0.24.1",
+ "strum 0.25.0",
"time",
"url",
"utoipa",
@@ -393,12 +426,6 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
-[[package]]
-name = "arcstr"
-version = "1.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f907281554a3d0312bb7aab855a8e0ef6cbf1614d06de54105039ca8b34460e"
-
[[package]]
name = "argon2"
version = "0.5.2"
@@ -481,7 +508,7 @@ dependencies = [
"bb8",
"diesel",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tracing",
]
@@ -506,27 +533,7 @@ dependencies = [
"futures-core",
"memchr",
"pin-project-lite",
- "tokio 1.32.0",
-]
-
-[[package]]
-name = "async-io"
-version = "1.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
-dependencies = [
- "async-lock",
- "autocfg",
- "cfg-if 1.0.0",
- "concurrent-queue",
- "futures-lite",
- "log",
- "parking",
- "polling",
- "rustix 0.37.25",
- "slab",
- "socket2 0.4.9",
- "waker-fn",
+ "tokio 1.35.1",
]
[[package]]
@@ -557,18 +564,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
name = "async-trait"
-version = "0.1.73"
+version = "0.1.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
+checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -604,8 +611,8 @@ dependencies = [
"actix-service",
"actix-tls",
"actix-utils",
- "ahash 0.7.6",
- "base64 0.21.4",
+ "ahash 0.7.7",
+ "base64 0.21.5",
"bytes 1.5.0",
"cfg-if 1.0.0",
"cookie",
@@ -624,7 +631,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -651,7 +658,7 @@ dependencies = [
"hyper",
"ring",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tower",
"tracing",
"zeroize",
@@ -666,7 +673,7 @@ dependencies = [
"aws-smithy-async",
"aws-smithy-types",
"fastrand 1.9.0",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tracing",
"zeroize",
]
@@ -729,6 +736,31 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "aws-sdk-lambda"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3ad176ffaa3aafa532246eb6a9f18a7d68da19950704ecc95d33d9dc3c62a9b"
+dependencies = [
+ "aws-credential-types",
+ "aws-endpoint",
+ "aws-http",
+ "aws-sig-auth",
+ "aws-smithy-async",
+ "aws-smithy-client",
+ "aws-smithy-http",
+ "aws-smithy-http-tower",
+ "aws-smithy-json",
+ "aws-smithy-types",
+ "aws-types",
+ "bytes 1.5.0",
+ "http",
+ "regex",
+ "tokio-stream",
+ "tower",
+ "tracing",
+]
+
[[package]]
name = "aws-sdk-s3"
version = "0.28.0"
@@ -882,7 +914,7 @@ checksum = "13bda3996044c202d75b91afeb11a9afae9db9a721c6a7a427410018e286b880"
dependencies = [
"futures-util",
"pin-project-lite",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-stream",
]
@@ -922,11 +954,11 @@ dependencies = [
"http",
"http-body",
"hyper",
- "hyper-rustls",
+ "hyper-rustls 0.23.2",
"lazy_static",
"pin-project-lite",
"rustls 0.20.9",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tower",
"tracing",
]
@@ -960,7 +992,7 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"pin-utils",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-util",
"tracing",
]
@@ -1106,9 +1138,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
-version = "0.21.4"
+version = "0.21.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2"
+checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
[[package]]
name = "base64-simd"
@@ -1136,7 +1168,7 @@ dependencies = [
"futures-channel",
"futures-util",
"parking_lot 0.12.1",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -1148,6 +1180,7 @@ dependencies = [
"num-bigint",
"num-integer",
"num-traits",
+ "serde",
]
[[package]]
@@ -1186,6 +1219,18 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
+[[package]]
+name = "bitvec"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
+dependencies = [
+ "funty",
+ "radium",
+ "tap",
+ "wyz",
+]
+
[[package]]
name = "blake2"
version = "0.10.6"
@@ -1227,6 +1272,30 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "borsh"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf617fabf5cdbdc92f774bfe5062d870f228b80056d41180797abf48bed4056e"
+dependencies = [
+ "borsh-derive",
+ "cfg_aliases",
+]
+
+[[package]]
+name = "borsh-derive"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f404657a7ea7b5249e36808dff544bc88a28f26e0ac40009f674b7a009d14be3"
+dependencies = [
+ "once_cell",
+ "proc-macro-crate 2.0.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.48",
+ "syn_derive",
+]
+
[[package]]
name = "brotli"
version = "3.4.0"
@@ -1264,6 +1333,28 @@ version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
+[[package]]
+name = "bytecheck"
+version = "0.6.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627"
+dependencies = [
+ "bytecheck_derive",
+ "ptr_meta",
+ "simdutf8",
+]
+
+[[package]]
+name = "bytecheck_derive"
+version = "0.6.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "bytecount"
version = "0.6.4"
@@ -1334,6 +1425,7 @@ dependencies = [
"error-stack",
"luhn",
"masking",
+ "router_env",
"serde",
"serde_json",
"thiserror",
@@ -1415,6 +1507,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "cfg_aliases"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
+
[[package]]
name = "checked_int_cast"
version = "1.0.0"
@@ -1433,7 +1531,7 @@ dependencies = [
"num-traits",
"serde",
"wasm-bindgen",
- "windows-targets",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -1487,9 +1585,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.3.4"
+version = "4.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "80672091db20273a15cf9fdd4e47ed43b5091ec9841bf4c6145c9dfbbcae09ed"
+checksum = "401a4694d2bf92537b6867d94de48c4842089645fdcdf6c71865b175d836e9c2"
dependencies = [
"clap_builder",
"clap_derive",
@@ -1498,9 +1596,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.3.4"
+version = "4.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1458a1df40e1e2afebb7ab60ce55c1fa8f431146205aa5f4887e0b111c27636"
+checksum = "72394f3339a76daf211e57d4bcb374410f3965dcc606dd0e03738c7888766980"
dependencies = [
"anstyle",
"bitflags 1.3.2",
@@ -1516,7 +1614,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -1571,7 +1669,7 @@ dependencies = [
"once_cell",
"phonenumber",
"proptest",
- "quick-xml",
+ "quick-xml 0.28.2",
"rand 0.8.5",
"regex",
"reqwest",
@@ -1587,7 +1685,7 @@ dependencies = [
"test-case",
"thiserror",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -1618,6 +1716,29 @@ dependencies = [
"yaml-rust",
]
+[[package]]
+name = "config_importer"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "indexmap 2.1.0",
+ "serde",
+ "serde_json",
+ "toml 0.7.4",
+]
+
+[[package]]
+name = "connector_configs"
+version = "0.1.0"
+dependencies = [
+ "api_models",
+ "serde",
+ "serde_with",
+ "toml 0.7.4",
+ "utoipa",
+]
+
[[package]]
name = "constant_time_eq"
version = "0.2.6"
@@ -1858,6 +1979,17 @@ dependencies = [
"typenum",
]
+[[package]]
+name = "currency_conversion"
+version = "0.1.0"
+dependencies = [
+ "common_enums",
+ "rust_decimal",
+ "rusty-money",
+ "serde",
+ "thiserror",
+]
+
[[package]]
name = "darling"
version = "0.14.4"
@@ -1903,7 +2035,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -1925,7 +2057,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
dependencies = [
"darling_core 0.20.3",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -1935,7 +2067,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if 1.0.0",
- "hashbrown 0.14.1",
+ "hashbrown 0.14.3",
"lock_api 0.4.10",
"once_cell",
"parking_lot_core 0.9.8",
@@ -1955,6 +2087,7 @@ dependencies = [
"async-trait",
"common_enums",
"common_utils",
+ "diesel_models",
"error-stack",
"masking",
"serde",
@@ -1973,7 +2106,7 @@ dependencies = [
"deadpool-runtime",
"num_cpus",
"retain_mut",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -2006,6 +2139,37 @@ dependencies = [
"rusticata-macros",
]
+[[package]]
+name = "derive_builder"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8"
+dependencies = [
+ "derive_builder_macro",
+]
+
+[[package]]
+name = "derive_builder_core"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f"
+dependencies = [
+ "darling 0.14.4",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "derive_builder_macro"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e"
+dependencies = [
+ "derive_builder_core",
+ "syn 1.0.109",
+]
+
[[package]]
name = "derive_deref"
version = "1.1.1"
@@ -2061,7 +2225,7 @@ dependencies = [
"diesel_table_macro_syntax",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -2091,7 +2255,7 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5"
dependencies = [
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -2148,7 +2312,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -2168,6 +2332,7 @@ name = "drainer"
version = "0.1.0"
dependencies = [
"async-bb8-diesel",
+ "async-trait",
"bb8",
"clap",
"common_utils",
@@ -2184,7 +2349,7 @@ dependencies = [
"serde_json",
"serde_path_to_error",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -2225,23 +2390,12 @@ dependencies = [
[[package]]
name = "errno"
-version = "0.3.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "add4f07d43996f76ef320709726a556a9d4f965d9410d8d0271132d2f8293480"
-dependencies = [
- "errno-dragonfly",
- "libc",
- "windows-sys",
-]
-
-[[package]]
-name = "errno-dragonfly"
-version = "0.1.2"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
+checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
- "cc",
"libc",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -2281,6 +2435,7 @@ dependencies = [
"serde_json",
"strum 0.25.0",
"thiserror",
+ "utoipa",
]
[[package]]
@@ -2299,8 +2454,11 @@ name = "euclid_wasm"
version = "0.1.0"
dependencies = [
"api_models",
+ "common_enums",
+ "connector_configs",
+ "currency_conversion",
"euclid",
- "getrandom 0.2.10",
+ "getrandom 0.2.11",
"kgraph_utils",
"once_cell",
"ron-parser",
@@ -2323,18 +2481,24 @@ dependencies = [
"async-trait",
"aws-config",
"aws-sdk-kms",
+ "aws-sdk-s3",
"aws-sdk-sesv2",
+ "aws-sdk-sts",
"aws-smithy-client",
- "base64 0.21.4",
+ "base64 0.21.5",
"common_utils",
"dyn-clone",
"error-stack",
+ "hex",
+ "hyper",
+ "hyper-proxy",
"masking",
"once_cell",
"router_env",
"serde",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
+ "vaultrs",
]
[[package]]
@@ -2359,12 +2523,12 @@ dependencies = [
"futures-util",
"http",
"hyper",
- "hyper-rustls",
+ "hyper-rustls 0.23.2",
"mime",
"serde",
"serde_json",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"url",
"webdriver",
]
@@ -2441,16 +2605,14 @@ dependencies = [
[[package]]
name = "fred"
-version = "6.3.2"
+version = "7.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a15cc18b56395b8b15ffcdcea7fe8586e3a3ccb3d9dc3b9408800d9814efb08e"
+checksum = "9282e65613822eea90c99872c51afa1de61542215cb11f91456a93f50a5a131a"
dependencies = [
"arc-swap",
- "arcstr",
"async-trait",
"bytes 1.5.0",
"bytes-utils",
- "cfg-if 1.0.0",
"float-cmp",
"futures 0.3.28",
"lazy_static",
@@ -2459,13 +2621,14 @@ dependencies = [
"rand 0.8.5",
"redis-protocol",
"semver 1.0.19",
- "sha-1 0.10.1",
- "tokio 1.32.0",
+ "socket2 0.5.5",
+ "tokio 1.35.1",
"tokio-stream",
"tokio-util",
"tracing",
"tracing-futures",
"url",
+ "urlencoding",
]
[[package]]
@@ -2493,7 +2656,7 @@ checksum = "b0fa992f1656e1707946bbba340ad244f0814009ef8c0118eb7b658395f19a2e"
dependencies = [
"frunk_proc_macro_helpers",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -2505,7 +2668,7 @@ dependencies = [
"frunk_core",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -2517,7 +2680,7 @@ dependencies = [
"frunk_core",
"frunk_proc_macro_helpers",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -2536,6 +2699,12 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+[[package]]
+name = "funty"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
+
[[package]]
name = "futures"
version = "0.1.31"
@@ -2559,9 +2728,9 @@ dependencies = [
[[package]]
name = "futures-channel"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2"
+checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
dependencies = [
"futures-core",
"futures-sink",
@@ -2569,9 +2738,9 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
+checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
[[package]]
name = "futures-executor"
@@ -2597,9 +2766,9 @@ dependencies = [
[[package]]
name = "futures-io"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
+checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
[[package]]
name = "futures-lite"
@@ -2618,26 +2787,26 @@ dependencies = [
[[package]]
name = "futures-macro"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
+checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
name = "futures-sink"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e"
+checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
[[package]]
name = "futures-task"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65"
+checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
[[package]]
name = "futures-timer"
@@ -2647,9 +2816,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c"
[[package]]
name = "futures-util"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
+checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
dependencies = [
"futures-channel",
"futures-core",
@@ -2680,7 +2849,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0176e0459c2e4a1fe232f984bca6890e681076abb9934f6cea7c326f3fc47818"
dependencies = [
"libc",
- "windows-targets",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -2696,9 +2865,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.10"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
+checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
dependencies = [
"cfg-if 1.0.0",
"js-sys",
@@ -2768,9 +2937,9 @@ dependencies = [
[[package]]
name = "h2"
-version = "0.3.21"
+version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
+checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9"
dependencies = [
"bytes 1.5.0",
"fnv",
@@ -2778,9 +2947,9 @@ dependencies = [
"futures-sink",
"futures-util",
"http",
- "indexmap 1.9.3",
+ "indexmap 2.1.0",
"slab",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-util",
"tracing",
]
@@ -2797,16 +2966,16 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
- "ahash 0.7.6",
+ "ahash 0.7.7",
]
[[package]]
name = "hashbrown"
-version = "0.14.1"
+version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12"
+checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
dependencies = [
- "ahash 0.8.3",
+ "ahash 0.8.6",
"allocator-api2",
]
@@ -2816,7 +2985,31 @@ version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
- "hashbrown 0.14.1",
+ "hashbrown 0.14.3",
+]
+
+[[package]]
+name = "headers"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270"
+dependencies = [
+ "base64 0.21.5",
+ "bytes 1.5.0",
+ "headers-core",
+ "http",
+ "httpdate",
+ "mime",
+ "sha1",
+]
+
+[[package]]
+name = "headers-core"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
+dependencies = [
+ "http",
]
[[package]]
@@ -2940,12 +3133,30 @@ dependencies = [
"itoa",
"pin-project-lite",
"socket2 0.4.9",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tower-service",
"tracing",
"want",
]
+[[package]]
+name = "hyper-proxy"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca815a891b24fdfb243fa3239c86154392b0953ee584aa1a2a1f66d20cbe75cc"
+dependencies = [
+ "bytes 1.5.0",
+ "futures 0.3.28",
+ "headers",
+ "http",
+ "hyper",
+ "hyper-tls",
+ "native-tls",
+ "tokio 1.35.1",
+ "tokio-native-tls",
+ "tower-service",
+]
+
[[package]]
name = "hyper-rustls"
version = "0.23.2"
@@ -2957,8 +3168,22 @@ dependencies = [
"log",
"rustls 0.20.9",
"rustls-native-certs",
- "tokio 1.32.0",
- "tokio-rustls",
+ "tokio 1.35.1",
+ "tokio-rustls 0.23.4",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
+dependencies = [
+ "futures-util",
+ "http",
+ "hyper",
+ "rustls 0.21.7",
+ "tokio 1.35.1",
+ "tokio-rustls 0.24.1",
]
[[package]]
@@ -2969,7 +3194,7 @@ checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
dependencies = [
"hyper",
"pin-project-lite",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-io-timeout",
]
@@ -2982,22 +3207,22 @@ dependencies = [
"bytes 1.5.0",
"hyper",
"native-tls",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-native-tls",
]
[[package]]
name = "iana-time-zone"
-version = "0.1.57"
+version = "0.1.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613"
+checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
- "windows",
+ "windows-core",
]
[[package]]
@@ -3080,12 +3305,12 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "2.0.2"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897"
+checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
"equivalent",
- "hashbrown 0.14.1",
+ "hashbrown 0.14.3",
"serde",
]
@@ -3113,17 +3338,6 @@ dependencies = [
"cfg-if 1.0.0",
]
-[[package]]
-name = "io-lifetimes"
-version = "1.0.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
-dependencies = [
- "hermit-abi",
- "libc",
- "windows-sys",
-]
-
[[package]]
name = "iovec"
version = "0.1.4"
@@ -3146,8 +3360,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi",
- "rustix 0.38.17",
- "windows-sys",
+ "rustix",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -3190,7 +3404,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33a96c4f2128a6f44ecf7c36df2b03dddf5a07b060a4d5ebc0a81e9821f7c60e"
dependencies = [
"anyhow",
- "base64 0.21.4",
+ "base64 0.21.5",
"flate2",
"once_cell",
"openssl",
@@ -3236,7 +3450,7 @@ version = "8.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378"
dependencies = [
- "base64 0.21.4",
+ "base64 0.21.5",
"pem",
"ring",
"serde",
@@ -3259,6 +3473,7 @@ name = "kgraph_utils"
version = "0.1.0"
dependencies = [
"api_models",
+ "common_enums",
"criterion",
"euclid",
"masking",
@@ -3281,9 +3496,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.148"
+version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b"
+checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "libgit2-sys"
@@ -3333,15 +3548,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
-version = "0.3.8"
+version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
-
-[[package]]
-name = "linux-raw-sys"
-version = "0.4.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3852614a3bd9ca9804678ba6be5e3b8ce76dfc902cae004e3e0c44051b6e88db"
+checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "local-channel"
@@ -3418,6 +3627,7 @@ version = "0.1.0"
dependencies = [
"bytes 1.5.0",
"diesel",
+ "erased-serde",
"serde",
"serde_json",
"subtle",
@@ -3589,14 +3799,14 @@ dependencies = [
[[package]]
name = "mio"
-version = "0.8.8"
+version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
+checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09"
dependencies = [
"libc",
"log",
"wasi 0.11.0+wasi-snapshot-preview1",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -3624,12 +3834,12 @@ dependencies = [
[[package]]
name = "moka"
-version = "0.11.3"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa6e72583bf6830c956235bff0d5afec8cf2952f579ebad18ae7821a917d950f"
+checksum = "d8017ec3548ffe7d4cef7ac0e12b044c01164a74c0f3119420faeaf13490ad8b"
dependencies = [
- "async-io",
"async-lock",
+ "async-trait",
"crossbeam-channel",
"crossbeam-epoch 0.9.15",
"crossbeam-utils 0.8.16",
@@ -3638,7 +3848,6 @@ dependencies = [
"parking_lot 0.12.1",
"quanta",
"rustc_version 0.4.0",
- "scheduled-thread-pool",
"skeptic",
"smallvec 1.11.1",
"tagptr",
@@ -3647,6 +3856,12 @@ dependencies = [
"uuid",
]
+[[package]]
+name = "mutually_exclusive_features"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e"
+
[[package]]
name = "nanoid"
version = "0.4.0"
@@ -3750,9 +3965,9 @@ dependencies = [
[[package]]
name = "num-traits"
-version = "0.2.16"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2"
+checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
dependencies = [
"autocfg",
"libm",
@@ -3768,6 +3983,27 @@ dependencies = [
"libc",
]
+[[package]]
+name = "num_enum"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9"
+dependencies = [
+ "num_enum_derive",
+]
+
+[[package]]
+name = "num_enum_derive"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799"
+dependencies = [
+ "proc-macro-crate 1.3.1",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "object"
version = "0.32.1"
@@ -3810,11 +4046,20 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
+[[package]]
+name = "openapi"
+version = "0.1.0"
+dependencies = [
+ "api_models",
+ "serde_json",
+ "utoipa",
+]
+
[[package]]
name = "openssl"
-version = "0.10.57"
+version = "0.10.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
+checksum = "79a4c6c3a2b158f7f8f2a2fc5a969fa3a068df6fc9dbb4a43845436e3af7c800"
dependencies = [
"bitflags 2.4.0",
"cfg-if 1.0.0",
@@ -3833,7 +4078,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -3844,9 +4089,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.93"
+version = "0.9.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d"
+checksum = "3812c071ba60da8b5677cc12bcb1d42989a65553772897a7e0355545a819838f"
dependencies = [
"cc",
"libc",
@@ -3878,7 +4123,7 @@ dependencies = [
"opentelemetry-proto",
"prost",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tonic",
]
@@ -3929,7 +4174,7 @@ dependencies = [
"percent-encoding",
"rand 0.8.5",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-stream",
]
@@ -4032,7 +4277,7 @@ dependencies = [
"libc",
"redox_syscall 0.3.5",
"smallvec 1.11.1",
- "windows-targets",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -4119,7 +4364,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -4183,7 +4428,7 @@ dependencies = [
"itertools 0.11.0",
"lazy_static",
"nom",
- "quick-xml",
+ "quick-xml 0.28.2",
"regex",
"regex-cache",
"serde",
@@ -4209,7 +4454,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -4258,6 +4503,27 @@ dependencies = [
"plotters-backend",
]
+[[package]]
+name = "pm_auth"
+version = "0.1.0"
+dependencies = [
+ "api_models",
+ "async-trait",
+ "bytes 1.5.0",
+ "common_enums",
+ "common_utils",
+ "error-stack",
+ "http",
+ "masking",
+ "mime",
+ "router_derive",
+ "router_env",
+ "serde",
+ "serde_json",
+ "strum 0.24.1",
+ "thiserror",
+]
+
[[package]]
name = "png"
version = "0.16.8"
@@ -4270,22 +4536,6 @@ dependencies = [
"miniz_oxide 0.3.7",
]
-[[package]]
-name = "polling"
-version = "2.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
-dependencies = [
- "autocfg",
- "bitflags 1.3.2",
- "cfg-if 1.0.0",
- "concurrent-queue",
- "libc",
- "log",
- "pin-project-lite",
- "windows-sys",
-]
-
[[package]]
name = "ppv-lite86"
version = "0.2.17"
@@ -4301,6 +4551,25 @@ dependencies = [
"vcpkg",
]
+[[package]]
+name = "proc-macro-crate"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
+dependencies = [
+ "once_cell",
+ "toml_edit 0.19.14",
+]
+
+[[package]]
+name = "proc-macro-crate"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8"
+dependencies = [
+ "toml_edit 0.20.2",
+]
+
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@@ -4327,9 +4596,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.68"
+version = "1.0.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b1106fec09662ec6dd98ccac0f81cef56984d0b49f75c92d8cbad76e20c005c"
+checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
dependencies = [
"unicode-ident",
]
@@ -4377,6 +4646,26 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "ptr_meta"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1"
+dependencies = [
+ "ptr_meta_derive",
+]
+
+[[package]]
+name = "ptr_meta_derive"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "pulldown-cmark"
version = "0.9.3"
@@ -4430,11 +4719,21 @@ dependencies = [
"serde",
]
+[[package]]
+name = "quick-xml"
+version = "0.31.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
[[package]]
name = "quote"
-version = "1.0.33"
+version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
@@ -4450,6 +4749,12 @@ dependencies = [
"scheduled-thread-pool",
]
+[[package]]
+name = "radium"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
+
[[package]]
name = "rand"
version = "0.7.3"
@@ -4509,7 +4814,7 @@ version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
- "getrandom 0.2.10",
+ "getrandom 0.2.11",
]
[[package]]
@@ -4559,6 +4864,36 @@ dependencies = [
"crossbeam-utils 0.8.16",
]
+[[package]]
+name = "rdkafka"
+version = "0.36.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54f02a5a40220f8a2dfa47ddb38ba9064475a5807a69504b6f91711df2eea63"
+dependencies = [
+ "futures-channel",
+ "futures-util",
+ "libc",
+ "log",
+ "rdkafka-sys",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "slab",
+ "tokio 1.35.1",
+]
+
+[[package]]
+name = "rdkafka-sys"
+version = "4.7.0+2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55e0d2f9ba6253f6ec72385e453294f8618e9e15c2c6aba2a5c01ccf9622d615"
+dependencies = [
+ "libc",
+ "libz-sys",
+ "num_enum",
+ "pkg-config",
+]
+
[[package]]
name = "redis-protocol"
version = "4.1.0"
@@ -4584,7 +4919,8 @@ dependencies = [
"router_env",
"serde",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
+ "tokio-stream",
]
[[package]]
@@ -4617,7 +4953,7 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
dependencies = [
- "getrandom 0.2.10",
+ "getrandom 0.2.11",
"redox_syscall 0.2.16",
"thiserror",
]
@@ -4678,6 +5014,15 @@ version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
+[[package]]
+name = "rend"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd"
+dependencies = [
+ "bytecheck",
+]
+
[[package]]
name = "reqwest"
version = "0.11.22"
@@ -4685,7 +5030,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b"
dependencies = [
"async-compression",
- "base64 0.21.4",
+ "base64 0.21.5",
"bytes 1.5.0",
"encoding_rs",
"futures-core",
@@ -4694,6 +5039,7 @@ dependencies = [
"http",
"http-body",
"hyper",
+ "hyper-rustls 0.24.2",
"hyper-tls",
"ipnet",
"js-sys",
@@ -4704,18 +5050,22 @@ dependencies = [
"once_cell",
"percent-encoding",
"pin-project-lite",
+ "rustls 0.21.7",
+ "rustls-pemfile",
"serde",
"serde_json",
"serde_urlencoded",
"system-configuration",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-native-tls",
+ "tokio-rustls 0.24.1",
"tokio-util",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
+ "webpki-roots 0.25.3",
"winreg",
]
@@ -4740,6 +5090,34 @@ dependencies = [
"winapi 0.3.9",
]
+[[package]]
+name = "rkyv"
+version = "0.7.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58"
+dependencies = [
+ "bitvec",
+ "bytecheck",
+ "hashbrown 0.12.3",
+ "ptr_meta",
+ "rend",
+ "rkyv_derive",
+ "seahash",
+ "tinyvec",
+ "uuid",
+]
+
+[[package]]
+name = "rkyv_derive"
+version = "0.7.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "ron"
version = "0.7.1"
@@ -4773,14 +5151,13 @@ dependencies = [
"actix-multipart",
"actix-rt",
"actix-web",
+ "analytics",
"api_models",
"argon2",
"async-bb8-diesel",
"async-trait",
"awc",
- "aws-config",
- "aws-sdk-s3",
- "base64 0.21.4",
+ "base64 0.21.5",
"bb8",
"bigdecimal",
"blake3",
@@ -4790,6 +5167,7 @@ dependencies = [
"common_enums",
"common_utils",
"config",
+ "currency_conversion",
"data_models",
"derive_deref",
"diesel",
@@ -4797,6 +5175,7 @@ dependencies = [
"digest 0.9.0",
"dyn-clone",
"encoding_rs",
+ "erased-serde",
"error-stack",
"euclid",
"external_services",
@@ -4816,10 +5195,14 @@ dependencies = [
"nanoid",
"num_cpus",
"once_cell",
+ "openapi",
"openssl",
+ "pm_auth",
"qrcode",
+ "quick-xml 0.31.0",
"rand 0.8.5",
"rand_chacha 0.3.1",
+ "rdkafka",
"redis_interface",
"regex",
"reqwest",
@@ -4827,6 +5210,7 @@ dependencies = [
"router_derive",
"router_env",
"roxmltree",
+ "rust_decimal",
"rustc-hash",
"scheduler",
"serde",
@@ -4836,20 +5220,19 @@ dependencies = [
"serde_urlencoded",
"serde_with",
"serial_test",
- "sha-1 0.9.8",
+ "sha-1",
"sqlx",
"storage_impl",
- "strum 0.24.1",
+ "strum 0.25.0",
"tera",
"test_utils",
"thiserror",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tracing-futures",
"unicode-segmentation",
"url",
"utoipa",
- "utoipa-swagger-ui",
"uuid",
"validator",
"wiremock",
@@ -4860,15 +5243,14 @@ dependencies = [
name = "router_derive"
version = "0.1.0"
dependencies = [
- "darling 0.14.4",
"diesel",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
"proc-macro2",
"quote",
"serde",
"serde_json",
"strum 0.24.1",
- "syn 1.0.109",
+ "syn 2.0.48",
]
[[package]]
@@ -4888,7 +5270,7 @@ dependencies = [
"serde_path_to_error",
"strum 0.24.1",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tracing",
"tracing-actix-web",
"tracing-appender",
@@ -4907,41 +5289,6 @@ dependencies = [
"xmlparser",
]
-[[package]]
-name = "rust-embed"
-version = "6.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a36224c3276f8c4ebc8c20f158eca7ca4359c8db89991c4925132aaaf6702661"
-dependencies = [
- "rust-embed-impl",
- "rust-embed-utils",
- "walkdir",
-]
-
-[[package]]
-name = "rust-embed-impl"
-version = "6.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49b94b81e5b2c284684141a2fb9e2a31be90638caf040bf9afbc5a0416afe1ac"
-dependencies = [
- "proc-macro2",
- "quote",
- "rust-embed-utils",
- "shellexpand",
- "syn 2.0.38",
- "walkdir",
-]
-
-[[package]]
-name = "rust-embed-utils"
-version = "7.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d38ff6bf570dc3bb7100fce9f7b60c33fa71d80e88da3f2580df4ff2bdded74"
-dependencies = [
- "sha2",
- "walkdir",
-]
-
[[package]]
name = "rust-ini"
version = "0.18.0"
@@ -4952,6 +5299,22 @@ dependencies = [
"ordered-multimap",
]
+[[package]]
+name = "rust_decimal"
+version = "1.33.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4"
+dependencies = [
+ "arrayvec",
+ "borsh",
+ "bytes 1.5.0",
+ "num-traits",
+ "rand 0.8.5",
+ "rkyv",
+ "serde",
+ "serde_json",
+]
+
[[package]]
name = "rustc-demangle"
version = "0.1.23"
@@ -4992,30 +5355,50 @@ dependencies = [
]
[[package]]
-name = "rustix"
-version = "0.37.25"
+name = "rustify"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4eb579851244c2c03e7c24f501c3432bed80b8f720af1d6e5b0e0f01555a035"
+checksum = "e9c02e25271068de581e03ac3bb44db60165ff1a10d92b9530192ccb898bc706"
dependencies = [
- "bitflags 1.3.2",
- "errno",
- "io-lifetimes",
- "libc",
- "linux-raw-sys 0.3.8",
- "windows-sys",
+ "anyhow",
+ "async-trait",
+ "bytes 1.5.0",
+ "http",
+ "reqwest",
+ "rustify_derive",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "thiserror",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "rustify_derive"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58135536c18c04f4634bedad182a3f41baf33ef811cc38a3ec7b7061c57134c8"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "regex",
+ "serde_urlencoded",
+ "syn 1.0.109",
+ "synstructure",
]
[[package]]
name = "rustix"
-version = "0.38.17"
+version = "0.38.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f25469e9ae0f3d0047ca8b93fc56843f38e6774f0914a107ff8b41be8be8e0b7"
+checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316"
dependencies = [
"bitflags 2.4.0",
"errno",
"libc",
- "linux-raw-sys 0.4.8",
- "windows-sys",
+ "linux-raw-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -5060,7 +5443,7 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2"
dependencies = [
- "base64 0.21.4",
+ "base64 0.21.5",
]
[[package]]
@@ -5091,6 +5474,14 @@ dependencies = [
"wait-timeout",
]
+[[package]]
+name = "rusty-money"
+version = "0.4.1"
+source = "git+https://github.com/varunsrin/rusty_money?rev=bbc0150742a0fff905225ff11ee09388e9babdcc#bbc0150742a0fff905225ff11ee09388e9babdcc"
+dependencies = [
+ "rust_decimal",
+]
+
[[package]]
name = "ryu"
version = "1.0.15"
@@ -5112,7 +5503,7 @@ version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
dependencies = [
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -5135,6 +5526,7 @@ dependencies = [
"external_services",
"futures 0.3.28",
"masking",
+ "num_cpus",
"once_cell",
"rand 0.8.5",
"redis_interface",
@@ -5145,7 +5537,7 @@ dependencies = [
"strum 0.24.1",
"thiserror",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"uuid",
]
@@ -5171,6 +5563,12 @@ dependencies = [
"untrusted",
]
+[[package]]
+name = "seahash"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
+
[[package]]
name = "security-framework"
version = "2.9.2"
@@ -5220,9 +5618,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "serde"
-version = "1.0.188"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
+checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
dependencies = [
"serde_derive",
]
@@ -5240,22 +5638,22 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.188"
+version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
+checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
name = "serde_json"
-version = "1.0.107"
+version = "1.0.108"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
+checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
dependencies = [
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
"itoa",
"ryu",
"serde",
@@ -5310,7 +5708,7 @@ checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -5336,15 +5734,15 @@ dependencies = [
[[package]]
name = "serde_with"
-version = "3.3.0"
+version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237"
+checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23"
dependencies = [
- "base64 0.21.4",
+ "base64 0.21.5",
"chrono",
"hex",
"indexmap 1.9.3",
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
"serde",
"serde_json",
"serde_with_macros",
@@ -5353,14 +5751,14 @@ dependencies = [
[[package]]
name = "serde_with_macros"
-version = "3.3.0"
+version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c"
+checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788"
dependencies = [
"darling 0.20.3",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -5385,7 +5783,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -5401,17 +5799,6 @@ dependencies = [
"opaque-debug",
]
-[[package]]
-name = "sha-1"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
-dependencies = [
- "cfg-if 1.0.0",
- "cpufeatures",
- "digest 0.10.7",
-]
-
[[package]]
name = "sha1"
version = "0.10.6"
@@ -5443,15 +5830,6 @@ dependencies = [
"lazy_static",
]
-[[package]]
-name = "shellexpand"
-version = "2.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4"
-dependencies = [
- "dirs",
-]
-
[[package]]
name = "signal-hook"
version = "0.3.17"
@@ -5480,9 +5858,15 @@ dependencies = [
"futures-core",
"libc",
"signal-hook",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
+[[package]]
+name = "simdutf8"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a"
+
[[package]]
name = "simple_asn1"
version = "0.6.2"
@@ -5561,12 +5945,12 @@ dependencies = [
[[package]]
name = "socket2"
-version = "0.5.4"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e"
+checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9"
dependencies = [
"libc",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -5602,7 +5986,7 @@ version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029"
dependencies = [
- "ahash 0.7.6",
+ "ahash 0.7.7",
"atoi",
"base64 0.13.1",
"bigdecimal",
@@ -5676,7 +6060,7 @@ checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024"
dependencies = [
"native-tls",
"once_cell",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-native-tls",
]
@@ -5711,7 +6095,7 @@ dependencies = [
"serde",
"serde_json",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -5781,7 +6165,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustversion",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -5803,15 +6187,27 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.38"
+version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
+checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
+[[package]]
+name = "syn_derive"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b"
+dependencies = [
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.48",
+]
+
[[package]]
name = "sync_wrapper"
version = "0.1.2"
@@ -5857,6 +6253,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
+[[package]]
+name = "tap"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+
[[package]]
name = "tempfile"
version = "3.8.0"
@@ -5866,8 +6268,8 @@ dependencies = [
"cfg-if 1.0.0",
"fastrand 2.0.1",
"redox_syscall 0.3.5",
- "rustix 0.38.17",
- "windows-sys",
+ "rustix",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -5911,7 +6313,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -5923,7 +6325,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
"test-case-core",
]
@@ -5932,7 +6334,7 @@ name = "test_utils"
version = "0.1.0"
dependencies = [
"async-trait",
- "base64 0.21.4",
+ "base64 0.21.5",
"clap",
"masking",
"rand 0.8.5",
@@ -5943,7 +6345,7 @@ dependencies = [
"serial_test",
"thirtyfour",
"time",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"toml 0.7.4",
]
@@ -5968,7 +6370,7 @@ dependencies = [
"stringmatch",
"thirtyfour-macros",
"thiserror",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"url",
"urlparse",
]
@@ -6002,7 +6404,7 @@ checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -6028,9 +6430,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.22"
+version = "0.3.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd"
+checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446"
dependencies = [
"itoa",
"serde",
@@ -6046,9 +6448,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
[[package]]
name = "time-macros"
-version = "0.2.9"
+version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "372950940a5f07bf38dbe211d7283c9e6d7327df53794992d293e534c733d09b"
+checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4"
dependencies = [
"time-core",
]
@@ -6104,21 +6506,21 @@ dependencies = [
[[package]]
name = "tokio"
-version = "1.32.0"
+version = "1.35.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9"
+checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104"
dependencies = [
"backtrace",
"bytes 1.5.0",
"libc",
- "mio 0.8.8",
+ "mio 0.8.10",
"num_cpus",
"parking_lot 0.12.1",
"pin-project-lite",
"signal-hook-registry",
- "socket2 0.5.4",
+ "socket2 0.5.5",
"tokio-macros",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -6181,18 +6583,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf"
dependencies = [
"pin-project-lite",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
name = "tokio-macros"
-version = "2.1.0"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
+checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
]
[[package]]
@@ -6202,7 +6604,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
dependencies = [
"native-tls",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -6231,10 +6633,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
dependencies = [
"rustls 0.20.9",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"webpki",
]
+[[package]]
+name = "tokio-rustls"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
+dependencies = [
+ "rustls 0.21.7",
+ "tokio 1.35.1",
+]
+
[[package]]
name = "tokio-stream"
version = "0.1.14"
@@ -6243,7 +6655,8 @@ checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842"
dependencies = [
"futures-core",
"pin-project-lite",
- "tokio 1.32.0",
+ "tokio 1.35.1",
+ "tokio-util",
]
[[package]]
@@ -6342,7 +6755,7 @@ dependencies = [
"futures-core",
"futures-sink",
"pin-project-lite",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tracing",
]
@@ -6361,10 +6774,11 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6135d499e69981f9ff0ef2167955a5333c35e36f6937d382974566b3d5b94ec"
dependencies = [
+ "indexmap 1.9.3",
"serde",
"serde_spanned",
"toml_datetime",
- "toml_edit",
+ "toml_edit 0.19.14",
]
[[package]]
@@ -6378,17 +6792,28 @@ dependencies = [
[[package]]
name = "toml_edit"
-version = "0.19.10"
+version = "0.19.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739"
+checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
dependencies = [
- "indexmap 1.9.3",
+ "indexmap 2.1.0",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
+[[package]]
+name = "toml_edit"
+version = "0.20.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
+dependencies = [
+ "indexmap 2.1.0",
+ "toml_datetime",
+ "winnow",
+]
+
[[package]]
name = "tonic"
version = "0.8.3"
@@ -6411,7 +6836,7 @@ dependencies = [
"pin-project",
"prost",
"prost-derive",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-stream",
"tokio-util",
"tower",
@@ -6434,7 +6859,7 @@ dependencies = [
"pin-project-lite",
"rand 0.8.5",
"slab",
- "tokio 1.32.0",
+ "tokio 1.35.1",
"tokio-util",
"tower-layer",
"tower-service",
@@ -6455,11 +6880,10 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
[[package]]
name = "tracing"
-version = "0.1.36"
+version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307"
+checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
- "cfg-if 1.0.0",
"log",
"pin-project-lite",
"tracing-attributes",
@@ -6468,11 +6892,12 @@ dependencies = [
[[package]]
name = "tracing-actix-web"
-version = "0.7.8"
+version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a512ec11fae6c666707625e84f83e5d58f941e9ab15723289c0d380edfe48f09"
+checksum = "1fe0d5feac3f4ca21ba33496bcb1ccab58cca6412b1405ae80f0581541e0ca78"
dependencies = [
"actix-web",
+ "mutually_exclusive_features",
"opentelemetry",
"pin-project",
"tracing",
@@ -6493,20 +6918,20 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.22"
+version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.48",
]
[[package]]
name = "tracing-core"
-version = "0.1.31"
+version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
"valuable",
@@ -6749,7 +7174,7 @@ version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82b1bc5417102a73e8464c686eef947bdfb99fcdfc0a4f228e81afa9526470a"
dependencies = [
- "indexmap 2.0.2",
+ "indexmap 2.1.0",
"serde",
"serde_json",
"utoipa-gen",
@@ -6764,23 +7189,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
- "syn 2.0.38",
-]
-
-[[package]]
-name = "utoipa-swagger-ui"
-version = "3.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84614caa239fb25b2bb373a52859ffd94605ceb256eeb1d63436325cf81e3653"
-dependencies = [
- "actix-web",
- "mime_guess",
- "regex",
- "rust-embed",
- "serde",
- "serde_json",
- "utoipa",
- "zip",
+ "syn 2.0.48",
]
[[package]]
@@ -6790,7 +7199,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
dependencies = [
"atomic",
- "getrandom 0.2.10",
+ "getrandom 0.2.11",
"serde",
]
@@ -6815,6 +7224,26 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+[[package]]
+name = "vaultrs"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28084ac780b443e7f3514df984a2933bd3ab39e71914d951cdf8e4d298a7c9bc"
+dependencies = [
+ "async-trait",
+ "bytes 1.5.0",
+ "derive_builder",
+ "http",
+ "reqwest",
+ "rustify",
+ "rustify_derive",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tracing",
+ "url",
+]
+
[[package]]
name = "vcpkg"
version = "0.2.15"
@@ -6913,7 +7342,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
"wasm-bindgen-shared",
]
@@ -6947,7 +7376,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.38",
+ "syn 2.0.48",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -7006,6 +7435,12 @@ dependencies = [
"webpki",
]
+[[package]]
+name = "webpki-roots"
+version = "0.25.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
+
[[package]]
name = "weezl"
version = "0.1.7"
@@ -7066,12 +7501,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
-name = "windows"
-version = "0.48.0"
+name = "windows-core"
+version = "0.51.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
+checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64"
dependencies = [
- "windows-targets",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -7080,7 +7515,16 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
- "windows-targets",
+ "windows-targets 0.48.5",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.0",
]
[[package]]
@@ -7089,13 +7533,28 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
+ "windows_aarch64_gnullvm 0.48.5",
+ "windows_aarch64_msvc 0.48.5",
+ "windows_i686_gnu 0.48.5",
+ "windows_i686_msvc 0.48.5",
+ "windows_x86_64_gnu 0.48.5",
+ "windows_x86_64_gnullvm 0.48.5",
+ "windows_x86_64_msvc 0.48.5",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.0",
+ "windows_aarch64_msvc 0.52.0",
+ "windows_i686_gnu 0.52.0",
+ "windows_i686_msvc 0.52.0",
+ "windows_x86_64_gnu 0.52.0",
+ "windows_x86_64_gnullvm 0.52.0",
+ "windows_x86_64_msvc 0.52.0",
]
[[package]]
@@ -7104,47 +7563,89 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+
[[package]]
name = "winnow"
-version = "0.4.11"
+version = "0.5.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "656953b22bcbfb1ec8179d60734981d1904494ecc91f8a3f0ee5c7389bb8eb4b"
+checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b"
dependencies = [
"memchr",
]
@@ -7156,18 +7657,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
dependencies = [
"cfg-if 1.0.0",
- "windows-sys",
+ "windows-sys 0.48.0",
]
[[package]]
name = "wiremock"
-version = "0.5.19"
+version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c6f71803d3a1c80377a06221e0530be02035d5b3e854af56c6ece7ac20ac441d"
+checksum = "bd7b0b5b253ebc0240d6aac6dd671c495c467420577bf634d3064ae7e6fa2b4c"
dependencies = [
"assert-json-diff",
"async-trait",
- "base64 0.21.4",
+ "base64 0.21.5",
"deadpool",
"futures 0.3.28",
"futures-timer",
@@ -7178,7 +7679,7 @@ dependencies = [
"regex",
"serde",
"serde_json",
- "tokio 1.32.0",
+ "tokio 1.35.1",
]
[[package]]
@@ -7191,6 +7692,15 @@ dependencies = [
"winapi-build",
]
+[[package]]
+name = "wyz"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
+dependencies = [
+ "tap",
+]
+
[[package]]
name = "x509-parser"
version = "0.15.1"
@@ -7224,23 +7734,31 @@ dependencies = [
]
[[package]]
-name = "zeroize"
-version = "1.6.0"
+name = "zerocopy"
+version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
+checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
+dependencies = [
+ "zerocopy-derive",
+]
[[package]]
-name = "zip"
-version = "0.6.6"
+name = "zerocopy-derive"
+version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
+checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
dependencies = [
- "byteorder",
- "crc32fast",
- "crossbeam-utils 0.8.16",
- "flate2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.48",
]
+[[package]]
+name = "zeroize"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
+
[[package]]
name = "zstd"
version = "0.12.4"
diff --git a/Dockerfile b/Dockerfile
index 8eb321dd2afd..e9591e5e9f27 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM rust:slim-bookworm as builder
+FROM rust:bookworm as builder
ARG EXTRA_FEATURES=""
@@ -36,7 +36,7 @@ RUN cargo build --release --features release ${EXTRA_FEATURES}
-FROM debian:bookworm-slim
+FROM debian:bookworm
# Placing config and binary executable in different directories
ARG CONFIG_DIR=/local/config
diff --git a/Makefile b/Makefile
index abe0dd50b145..a39fc4c22673 100644
--- a/Makefile
+++ b/Makefile
@@ -9,6 +9,10 @@
eq = $(if $(or $(1),$(2)),$(and $(findstring $(1),$(2)),\
$(findstring $(2),$(1))),1)
+
+ROOT_DIR_WITH_SLASH := $(dir $(realpath $(lastword $(MAKEFILE_LIST))))
+ROOT_DIR := $(realpath $(ROOT_DIR_WITH_SLASH))
+
#
# = Targets
#
@@ -30,11 +34,18 @@ eq = $(if $(or $(1),$(2)),$(and $(findstring $(1),$(2)),\
release
+# Check a local package and all of its dependencies for errors
+#
+# Usage :
+# make check
+check:
+ cargo check
+
+
# Compile application for running on local machine
#
# Usage :
# make build
-
build :
cargo build
@@ -67,6 +78,14 @@ fmt :
clippy :
cargo clippy --all-features --all-targets -- -D warnings
+# Build the DSL crate as a WebAssembly JS library
+#
+# Usage :
+# make euclid-wasm
+
+euclid-wasm:
+ wasm-pack build --target web --out-dir $(ROOT_DIR)/wasm --out-name euclid $(ROOT_DIR)/crates/euclid_wasm -- --features dummy_connector
+
# Run Rust tests of project.
#
# Usage :
@@ -93,4 +112,4 @@ precommit : fmt clippy test
hack:
- cargo hack check --workspace --each-feature --no-dev-deps
\ No newline at end of file
+ cargo hack check --workspace --each-feature --all-targets
diff --git a/README.md b/README.md
index 129a0512d4a0..0f5e924589f2 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,6 @@
-
The open-source payments switch
@@ -11,14 +10,16 @@ The single API to access payment ecosystems across 130+ countries
Quick Start Guide •
+ Local Setup Guide •
Fast Integration for Stripe Users •
+ API Docs •
Supported Features •
- FAQs
What's Included •
Join us in building HyperSwitch •
Community •
Bugs and feature requests •
+ FAQs •
Versioning •
Copyright and License
@@ -35,7 +36,6 @@ The single API to access payment ecosystems across 130+ countries
-
@@ -57,17 +57,14 @@ Using Hyperswitch, you can:
⚡️ Quick Start Guide
- One-click deployment on AWS cloud
+### One-click deployment on AWS cloud
-The fastest and easiest way to try hyperswitch is via our CDK scripts
+The fastest and easiest way to try Hyperswitch is via our CDK scripts
1. Click on the following button for a quick standalone deployment on AWS, suitable for prototyping.
No code or setup is required in your system and the deployment is covered within the AWS free-tier setup.
- Click here if you have not bootstrapped your region before deploying
-
-
-
+
2. Sign-in to your AWS console.
@@ -75,12 +72,27 @@ The fastest and easiest way to try hyperswitch is via our CDK scripts
For an early access to the production-ready setup fill this Early Access Form
+### Run it on your system
+
+You can run Hyperswitch on your system using Docker Compose after cloning this repository:
+
+```shell
+docker compose up -d
+```
+
+This will start the payments router, the primary component within Hyperswitch.
+
+Check out the [local setup guide][local-setup-guide] for a more comprehensive
+setup, which includes the [scheduler and monitoring services][docker-compose-scheduler-monitoring].
+
+[local-setup-guide]: /docs/try_local_system.md
+[docker-compose-scheduler-monitoring]: /docs/try_local_system.md#run-the-scheduler-and-monitoring-services
+
🔌 Fast Integration for Stripe Users
-If you are already using Stripe, integrating with Hyperswitch is fun, fast &
-easy.
+If you are already using Stripe, integrating with Hyperswitch is fun, fast & easy.
Try the steps below to get a feel for how quick the setup is:
1. Get API keys from our [dashboard].
@@ -99,11 +111,9 @@ Try the steps below to get a feel for how quick the setup is:
As of Sept 2023, we support 50+ payment processors and multiple global payment methods.
In addition, we are continuously integrating new processors based on their reach and community requests.
Our target is to support 100+ processors by H2 2023.
-You can find the latest list of payment processors, supported methods, and
-features
-[here][supported-connectors-and-features].
+You can find the latest list of payment processors, supported methods, and features [here][supported-connectors-and-features].
-[supported-connectors-and-features]: https://docs.google.com/spreadsheets/d/e/2PACX-1vQWHLza9m5iO4Ol-tEBx22_Nnq8Mb3ISCWI53nrinIGLK8eHYmHGnvXFXUXEut8AFyGyI9DipsYaBLG/pubhtml?gid=0&single=true
+[supported-connectors-and-features]: https://hyperswitch.io/pm-list
### 🌟 Hosted Version
@@ -255,11 +265,11 @@ We welcome contributions from the community. Please read through our
Included are directions for opening issues, coding standards, and notes on
development.
-🦀 **Important note for Rust developers**: We aim for contributions from the community
-across a broad range of tracks. Hence, we have prioritised simplicity and code
-readability over purely idiomatic code. For example, some of the code in core
-functions (e.g., `payments_core`) is written to be more readable than
-pure-idiomatic.
+- We appreciate all types of contributions: code, documentation, demo creation, or some new way you want to contribute to us.
+ We will reward every contribution with a Hyperswitch branded t-shirt.
+- 🦀 **Important note for Rust developers**: We aim for contributions from the community across a broad range of tracks.
+ Hence, we have prioritised simplicity and code readability over purely idiomatic code.
+ For example, some of the code in core functions (e.g., `payments_core`) is written to be more readable than pure-idiomatic.
@@ -267,12 +277,10 @@ pure-idiomatic.
Get updates on Hyperswitch development and chat with the community:
-- Read and subscribe to [the official Hyperswitch blog][blog].
-- Join our [Discord server][discord].
-- Join our [Slack workspace][slack].
-- Ask and explore our [GitHub Discussions][github-discussions].
+- [Discord server][discord] for questions related to contributing to hyperswitch, questions about the architecture, components, etc.
+- [Slack workspace][slack] for questions related to integrating hyperswitch, integrating a connector in hyperswitch, etc.
+- [GitHub Discussions][github-discussions] to drop feature requests or suggest anything payments-related you need for your stack.
-[blog]: https://hyperswitch.io/blog
[discord]: https://discord.gg/wJZ7DVW8mm
[slack]: https://join.slack.com/t/hyperswitch-io/shared_invite/zt-1k6cz4lee-SAJzhz6bjmpp4jZCDOtOIg
[github-discussions]: https://github.com/juspay/hyperswitch/discussions
@@ -317,7 +325,6 @@ Check the [CHANGELOG.md](./CHANGELOG.md) file for details.
This product is licensed under the [Apache 2.0 License](LICENSE).
-
✨ Thanks to all contributors
diff --git a/add_connector.md b/add_connector.md
index da09ae0024e7..7fc3dcb27d14 100644
--- a/add_connector.md
+++ b/add_connector.md
@@ -9,6 +9,14 @@ This is a guide to contributing new connector to Router. This guide includes ins
- Understanding of the Connector APIs which you wish to integrate with Router
- Setup of Router repository and running it on local
- Access to API credentials for testing the Connector API (you can quickly sign up for sandbox/uat credentials by visiting the website of the connector you wish to integrate)
+- Ensure that you have the nightly toolchain installed because the connector template script includes code formatting.
+
+ Install it using `rustup`:
+
+ ```bash
+ rustup toolchain install nightly
+ ```
+
In Router, there are Connectors and Payment Methods, examples of both are shown below from which the difference is apparent.
@@ -17,22 +25,17 @@ In Router, there are Connectors and Payment Methods, examples of both are shown
A connector is an integration to fulfill payments. Related use cases could be any of the below
- Payment processor (Stripe, Adyen, ChasePaymentTech etc.,)
-- Fraud and Risk management platform (like Ravelin, Riskified etc.,)
+- Fraud and Risk management platform (like Signifyd, Riskified etc.,)
- Payment network (Visa, Master)
- Payment authentication services (Cardinal etc.,)
- Router supports "Payment Processors" right now. Support will be extended to the other categories in the near future.
Currently, the router is compatible with 'Payment Processors' and 'Fraud and Risk Management' platforms. Support for additional categories will be added in the near future.
### What is a Payment Method ?
-Each Connector (say, a Payment Processor) could support multiple payment methods
+Every Payment Processor has the capability to accommodate various payment methods. Refer to the [Hyperswitch Payment matrix](https://hyperswitch.io/pm-list) to discover the supported processors and payment methods.
-- **Cards :** Bancontact , Knet, Mada
-- **Bank Transfers :** EPS , giropay, sofort
-- **Bank Direct Debit :** Sepa direct debit
-- **Wallets :** Apple Pay , Google Pay , Paypal
-
-Cards and Bank Transfer payment methods are already included in Router. Hence, adding a new connector which offers payment_methods available in Router is easy and requires almost no breaking changes.
-Adding a new payment method (say Wallets or Bank Direct Debit) might require some changes in core business logic of Router, which we are actively working upon.
+The above-mentioned payment methods are already included in Router. Hence, adding a new connector which offers payment_methods available in Router is easy and requires almost no breaking changes.
+Adding a new payment method might require some changes in core business logic of Router, which we are actively working on.
## How to Integrate a Connector
@@ -46,8 +49,7 @@ Below is a step-by-step tutorial for integrating a new connector.
### **Generate the template**
```bash
-cd scripts
-bash add_connector.sh
+sh scripts/add_connector.sh
```
For this tutorial `` would be `checkout`.
@@ -81,50 +83,59 @@ For example, in case of checkout, the [request](https://api-reference.checkout.c
Now let's implement Request type for checkout
-```rust
-#[derive(Debug, Serialize)]
-pub struct CheckoutPaymentsRequest {
- pub source: Source,
- pub amount: i64,
- pub currency: String,
- #[serde(default = "generate_processing_channel_id")]
- pub processing_channel_id: Cow<'static, str>,
-}
-
-fn generate_processing_channel_id() -> Cow<'static, str> {
- "pc_e4mrdrifohhutfurvuawughfwu".into()
-}
-```
-
-Since Router is connector agnostic, only minimal data is sent to connector and optional fields may be ignored.
-
-Here processing_channel_id, is specific to checkout and implementations of such functions should be inside the checkout directory.
-Let's define `Source`
-
```rust
#[derive(Debug, Serialize)]
pub struct CardSource {
#[serde(rename = "type")]
- pub source_type: Option,
- pub number: Option,
- pub expiry_month: Option,
- pub expiry_year: Option,
+ pub source_type: CheckoutSourceTypes,
+ pub number: cards::CardNumber,
+ pub expiry_month: Secret,
+ pub expiry_year: Secret,
+ pub cvv: Secret,
}
#[derive(Debug, Serialize)]
#[serde(untagged)]
-pub enum Source {
+pub enum PaymentSource {
Card(CardSource),
- // TODO: Add other sources here.
+ Wallets(WalletSource),
+ ApplePayPredecrypt(Box),
+}
+
+#[derive(Debug, Serialize)]
+pub struct PaymentsRequest {
+ pub source: PaymentSource,
+ pub amount: i64,
+ pub currency: String,
+ pub processing_channel_id: Secret,
+ #[serde(rename = "3ds")]
+ pub three_ds: CheckoutThreeDS,
+ #[serde(flatten)]
+ pub return_url: ReturnUrl,
+ pub capture: bool,
+ pub reference: String,
}
```
-`Source` is an enum type. Request types will need to derive `Serialize` and response types will need to derive `Deserialize`. For request types `From` needs to be implemented.
+Since Router is connector-agnostic, only minimal data is sent to the connector and optional fields may be ignored.
+
+Here, processing_channel_id is specific to checkout, and implementations of such functions should be inside the checkout directory.
+Let's define `PaymentSource`
+
+`PaymentSource` is an enum type. Request types will need to derive `Serialize` and response types will need to derive `Deserialize`. For request types `From` needs to be implemented.
+For request types that involve an amount, the implementation of `TryFrom<&ConnectorRouterData<&T>>` is required:
+
+```rust
+impl TryFrom<&CheckoutRouterData<&T>> for PaymentsRequest
+```
+else
```rust
-impl<'a> From<&types::RouterData<'a>> for CheckoutRequestType
+impl TryFrom for PaymentsRequest
```
+where `T` is a generic type which can be `types::PaymentsAuthorizeRouterData`, `types::PaymentsCaptureRouterData`, etc.
+
In this impl block we build the request type from RouterData which will almost always contain all the required information you need for payment processing.
`RouterData` contains all the information required for processing the payment.
@@ -165,39 +176,56 @@ While implementing the Response Type, the important Enum to be defined for every
It stores the different status types that the connector can give in its response that is listed in its API spec. Below is the definition for checkout
```rust
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
+#[derive(Default, Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum CheckoutPaymentStatus {
Authorized,
+ #[default]
Pending,
#[serde(rename = "Card Verified")]
CardVerified,
Declined,
+ Captured,
}
```
The important part is mapping it to the Router status codes.
```rust
-impl From for enums::AttemptStatus {
- fn from(item: CheckoutPaymentStatus) -> Self {
- match item {
- CheckoutPaymentStatus::Authorized => enums::AttemptStatus::Charged,
- CheckoutPaymentStatus::Declined => enums::AttemptStatus::Failure,
- CheckoutPaymentStatus::Pending => enums::AttemptStatus::Authorizing,
- CheckoutPaymentStatus::CardVerified => enums::AttemptStatus::Pending,
+impl ForeignFrom<(CheckoutPaymentStatus, Option)> for enums::AttemptStatus {
+ fn foreign_from(item: (CheckoutPaymentStatus, Option)) -> Self {
+ let (status, balances) = item;
+
+ match status {
+ CheckoutPaymentStatus::Authorized => {
+ if let Some(Balances {
+ available_to_capture: 0,
+ }) = balances
+ {
+ Self::Charged
+ } else {
+ Self::Authorized
+ }
+ }
+ CheckoutPaymentStatus::Captured => Self::Charged,
+ CheckoutPaymentStatus::Declined => Self::Failure,
+ CheckoutPaymentStatus::Pending => Self::AuthenticationPending,
+ CheckoutPaymentStatus::CardVerified => Self::Pending,
}
}
}
```
+If you're converting ConnectorPaymentStatus to AttemptStatus without any additional conditions, you can employ an `impl From for enums::AttemptStatus`.
-Note: `enum::AttemptStatus` is Router status.
+Note: A payment intent can have multiple payment attempts. `enums::AttemptStatus` represents the status of a payment attempt.
-Router status are given below
+Some of the attempt status are given below
-- **Charged :** The amount has been debited
-- **PendingVBV :** Pending but verified by visa
-- **Failure :** The payment Failed
-- **Authorizing :** In the process of authorizing.
+- **Charged :** The payment attempt has succeeded.
+- **Pending :** Payment is in processing state.
+- **Failure :** The payment attempt has failed.
+- **Authorized :** Payment is authorized. Authorized payment can be voided, captured and partial captured.
+- **AuthenticationPending :** Customer action is required.
+- **Voided :** The payment was voided and never captured; the funds were returned to the customer.
It is highly recommended that the default status be Pending. Only explicit failure and explicit success from the connector shall be marked as success or failure respectively.
@@ -213,26 +241,119 @@ impl Default for CheckoutPaymentStatus {
Below is rest of the response type implementation for checkout
```rust
-
-#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq)]
-pub struct CheckoutPaymentsResponse {
+#[derive(Clone, Debug, Default, Eq, PartialEq, Deserialize, Serialize)]
+pub struct PaymentsResponse {
id: String,
- amount: i64,
+ amount: Option<i64>,
+ action_id: Option<String>,
status: CheckoutPaymentStatus,
+ #[serde(rename = "_links")]
+ links: Links,
+ balances: Option<Balances>,
+ reference: Option<String>,
+ response_code: Option<String>,
+ response_summary: Option<String>,
}
-impl<'a> From> for types::RouterData<'a> {
- fn from(item: types::ResponseRouterData<'a, CheckoutPaymentsResponse>) -> Self {
- types::RouterData {
- connector_transaction_id: Some(item.response.id),
- amount_received: Some(item.response.amount),
- status: enums::Status::from(item.response.status),
+#[derive(Deserialize, Debug)]
+pub struct ActionResponse {
+ #[serde(rename = "id")]
+ pub action_id: String,
+ pub amount: i64,
+ #[serde(rename = "type")]
+ pub action_type: ActionType,
+ pub approved: Option<bool>,
+ pub reference: Option<String>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(untagged)]
+pub enum PaymentsResponseEnum {
+ ActionResponse(Vec<ActionResponse>),
+ PaymentResponse(Box),
+}
+
+impl TryFrom<types::PaymentsResponseRouterData<PaymentsResponse>>
+ for types::PaymentsAuthorizeRouterData
+{
+ type Error = error_stack::Report<errors::ConnectorError>;
+ fn try_from(
+ item: types::PaymentsResponseRouterData<PaymentsResponse>,
+ ) -> Result<Self, Self::Error> {
+ let redirection_data = item.response.links.redirect.map(|href| {
+ services::RedirectForm::from((href.redirection_url, services::Method::Get))
+ });
+ let status = enums::AttemptStatus::foreign_from((
+ item.response.status,
+ item.data.request.capture_method,
+ ));
+ let error_response = if status == enums::AttemptStatus::Failure {
+ Some(types::ErrorResponse {
+ status_code: item.http_code,
+ code: item
+ .response
+ .response_code
+ .unwrap_or_else(|| consts::NO_ERROR_CODE.to_string()),
+ message: item
+ .response
+ .response_summary
+ .clone()
+ .unwrap_or_else(|| consts::NO_ERROR_MESSAGE.to_string()),
+ reason: item.response.response_summary,
+ attempt_status: None,
+ connector_transaction_id: None,
+ })
+ } else {
+ None
+ };
+ let payments_response_data = types::PaymentsResponseData::TransactionResponse {
+ resource_id: types::ResponseId::ConnectorTransactionId(item.response.id.clone()),
+ redirection_data,
+ mandate_reference: None,
+ connector_metadata: None,
+ network_txn_id: None,
+ connector_response_reference_id: Some(
+ item.response.reference.unwrap_or(item.response.id),
+ ),
+ };
+ Ok(Self {
+ status,
+ response: error_response.map_or_else(|| Ok(payments_response_data), Err),
..item.data
- }
+ })
}
}
```
+Using an enum for a response struct in Rust is not recommended due to potential deserialization issues where the deserializer attempts to deserialize into all the enum variants. A preferable alternative is to employ a separate enum for the possible response variants and include it as a field within the response struct.
+
+Some recommended fields that needs to be set on connector request and response
+
+- **connector_request_reference_id :** Most of the connectors anticipate merchants to include their own reference ID in payment requests. For instance, the merchant's reference ID in the checkout `PaymentRequest` is specified as `reference`.
+
+```rust
+ reference: item.router_data.connector_request_reference_id.clone(),
+```
+- **connector_response_reference_id :** Merchants might face ambiguity when deciding which ID to use in the connector dashboard for payment identification. It is essential to populate the connector_response_reference_id with the appropriate reference ID, allowing merchants to recognize the transaction. This field can be linked to either `merchant_reference` or `connector_transaction_id`, depending on the field that the connector dashboard search functionality supports.
+
+```rust
+ connector_response_reference_id: item.response.reference.or(Some(item.response.id))
+```
+
+- **resource_id :** The connector assigns an identifier to a payment attempt, referred to as `connector_transaction_id`. This identifier is represented as an enum variant for the `resource_id`. If the connector does not provide a `connector_transaction_id`, the resource_id is set to `NoResponseId`.
+
+```rust
+ resource_id: types::ResponseId::ConnectorTransactionId(item.response.id.clone()),
+```
+- **redirection_data :** For the implementation of a redirection flow (3D Secure, bank redirects, etc.), assign the redirection link to the `redirection_data`.
+
+```rust
+ let redirection_data = item.response.links.redirect.map(|href| {
+ services::RedirectForm::from((href.redirection_url, services::Method::Get))
+ });
+```
+
+
And finally the error type implementation
```rust
@@ -251,23 +372,250 @@ Similarly for every API endpoint you can implement request and response types.
The `mod.rs` file contains the trait implementations where we use the types in transformers.
-There are four types of tasks that are done by implementing traits:
-
-- **Payment :** For making/initiating payments
-- **PaymentSync :** For checking status of the payment
-- **Refund :** For initiating refund
-- **RefundSync :** For checking status of the Refund.
-
We create a struct with the connector name and have trait implementations for it.
The following trait implementations are mandatory
-- **ConnectorCommon :** contains common description of the connector, like the base endpoint, content-type, error message, id.
-- **Payment :** Trait Relationship, has impl block.
-- **PaymentAuthorize :** Trait Relationship, has impl block.
-- **ConnectorIntegration :** For every api endpoint contains the url, using request transform and response transform and headers.
-- **Refund :** Trait Relationship, has empty body.
-- **RefundExecute :** Trait Relationship, has empty body.
-- **RefundSync :** Trait Relationship, has empty body.
+**ConnectorCommon :** contains common description of the connector, like the base endpoint, content-type, error response handling, id, currency unit.
+
+Within the `ConnectorCommon` trait, you'll find the following methods :
+
+ - `id` method corresponds directly to the connector name.
+ ```rust
+ fn id(&self) -> &'static str {
+ "checkout"
+ }
+ ```
+ - `get_currency_unit` method anticipates you to [specify the accepted currency unit](#set-the-currency-unit) for the connector.
+ ```rust
+ fn get_currency_unit(&self) -> api::CurrencyUnit {
+ api::CurrencyUnit::Minor
+ }
+ ```
+ - `common_get_content_type` method requires you to provide the accepted content type for the connector API.
+ ```rust
+ fn common_get_content_type(&self) -> &'static str {
+ "application/json"
+ }
+ ```
+ - `get_auth_header` method accepts common HTTP Authorization headers that are accepted in all `ConnectorIntegration` flows.
+ ```rust
+ fn get_auth_header(
+ &self,
+ auth_type: &types::ConnectorAuthType,
+ ) -> CustomResult<Vec<(String, request::Maskable<String>)>, errors::ConnectorError> {
+ let auth: checkout::CheckoutAuthType = auth_type
+ .try_into()
+ .change_context(errors::ConnectorError::FailedToObtainAuthType)?;
+ Ok(vec![(
+ headers::AUTHORIZATION.to_string(),
+ format!("Bearer {}", auth.api_secret.peek()).into_masked(),
+ )])
+ }
+ ```
+
+ - `base_url` method is for fetching the base URL of connector's API. Base url needs to be consumed from configs.
+ ```rust
+ fn base_url<'a>(&self, connectors: &'a settings::Connectors) -> &'a str {
+ connectors.checkout.base_url.as_ref()
+ }
+ ```
+ - `build_error_response` method is common error response handling for a connector if it is same in all cases
+
+ ```rust
+ fn build_error_response(
+ &self,
+ res: types::Response,
+ ) -> CustomResult<types::ErrorResponse, errors::ConnectorError> {
+ let response: checkout::ErrorResponse = if res.response.is_empty() {
+ let (error_codes, error_type) = if res.status_code == 401 {
+ (
+ Some(vec!["Invalid api key".to_string()]),
+ Some("invalid_api_key".to_string()),
+ )
+ } else {
+ (None, None)
+ };
+ checkout::ErrorResponse {
+ request_id: None,
+ error_codes,
+ error_type,
+ }
+ } else {
+ res.response
+ .parse_struct("ErrorResponse")
+ .change_context(errors::ConnectorError::ResponseDeserializationFailed)?
+ };
+
+ router_env::logger::info!(error_response=?response);
+ let errors_list = response.error_codes.clone().unwrap_or_default();
+ let option_error_code_message = conn_utils::get_error_code_error_message_based_on_priority(
+ self.clone(),
+ errors_list
+ .into_iter()
+ .map(|errors| errors.into())
+ .collect(),
+ );
+ Ok(types::ErrorResponse {
+ status_code: res.status_code,
+ code: option_error_code_message
+ .clone()
+ .map(|error_code_message| error_code_message.error_code)
+ .unwrap_or(consts::NO_ERROR_CODE.to_string()),
+ message: option_error_code_message
+ .map(|error_code_message| error_code_message.error_message)
+ .unwrap_or(consts::NO_ERROR_MESSAGE.to_string()),
+ reason: response
+ .error_codes
+ .map(|errors| errors.join(" & "))
+ .or(response.error_type),
+ attempt_status: None,
+ connector_transaction_id: None,
+ })
+ }
+ ```
+
+**ConnectorIntegration :** For every api endpoint contains the url, using request transform and response transform and headers.
+Within the `ConnectorIntegration` trait, you'll find the following methods implemented(below mentioned is example for authorized flow):
+
+- `get_url` method defines endpoint for authorize flow, base url is consumed from `ConnectorCommon` trait.
+
+```rust
+ fn get_url(
+ &self,
+ _req: &types::PaymentsAuthorizeRouterData,
+ connectors: &settings::Connectors,
+ ) -> CustomResult<String, errors::ConnectorError> {
+ Ok(format!("{}{}", self.base_url(connectors), "payments"))
+ }
+```
+- `get_headers` method accepts HTTP headers that are accepted for authorize flow. In this context, it is utilized from the `ConnectorCommonExt` trait, as the connector adheres to common headers across various flows.
+
+```rust
+ fn get_headers(
+ &self,
+ req: &types::PaymentsAuthorizeRouterData,
+ connectors: &settings::Connectors,
+ ) -> CustomResult<Vec<(String, request::Maskable<String>)>, errors::ConnectorError> {
+ self.build_headers(req, connectors)
+ }
+```
+
+- `get_request_body` method calls transformers where hyperswitch payment request data is transformed into the connector payment request. For constructing the request body, we have a function `log_and_get_request_body` that accepts a generic argument — the struct that is passed as the body for connector integration — and a function that can be used to encode it into a String. We log the request in this function, as the struct will be intact and the masked values will be masked.
+
+```rust
+ fn get_request_body(
+ &self,
+ req: &types::PaymentsAuthorizeRouterData,
+ _connectors: &settings::Connectors,
+ ) -> CustomResult<Option<types::RequestBody>, errors::ConnectorError> {
+ let connector_router_data = checkout::CheckoutRouterData::try_from((
+ &self.get_currency_unit(),
+ req.request.currency,
+ req.request.amount,
+ req,
+ ))?;
+ let connector_req = checkout::PaymentsRequest::try_from(&connector_router_data)?;
+ let checkout_req = types::RequestBody::log_and_get_request_body(
+ &connector_req,
+ utils::Encode::<checkout::PaymentsRequest>::encode_to_string_of_json,
+ )
+ .change_context(errors::ConnectorError::RequestEncodingFailed)?;
+ Ok(Some(checkout_req))
+ }
+```
+
+- `build_request` method assembles the API request by providing the method, URL, headers, and request body as parameters.
+```rust
+ fn build_request(
+ &self,
+ req: &types::RouterData<
+ api::Authorize,
+ types::PaymentsAuthorizeData,
+ types::PaymentsResponseData,
+ >,
+ connectors: &settings::Connectors,
+ ) -> CustomResult<Option<services::Request>, errors::ConnectorError> {
+ Ok(Some(
+ services::RequestBuilder::new()
+ .method(services::Method::Post)
+ .url(&types::PaymentsAuthorizeType::get_url(
+ self, req, connectors,
+ )?)
+ .attach_default_headers()
+ .headers(types::PaymentsAuthorizeType::get_headers(
+ self, req, connectors,
+ )?)
+ .body(types::PaymentsAuthorizeType::get_request_body(
+ self, req, connectors,
+ )?)
+ .build(),
+ ))
+ }
+```
+- `handle_response` method calls transformers where connector response data is transformed into hyperswitch response.
+```rust
+ fn handle_response(
+ &self,
+ data: &types::PaymentsAuthorizeRouterData,
+ res: types::Response,
+ ) -> CustomResult<types::PaymentsAuthorizeRouterData, errors::ConnectorError> {
+ let response: checkout::PaymentsResponse = res
+ .response
+ .parse_struct("PaymentIntentResponse")
+ .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
+ types::RouterData::try_from(types::ResponseRouterData {
+ response,
+ data: data.clone(),
+ http_code: res.status_code,
+ })
+ .change_context(errors::ConnectorError::ResponseHandlingFailed)
+ }
+```
+- `get_error_response` method to manage error responses. As the handling of checkout errors remains consistent across various flows, we've incorporated it from the `build_error_response` method within the `ConnectorCommon` trait.
+```rust
+ fn get_error_response(
+ &self,
+ res: types::Response,
+ ) -> CustomResult<types::ErrorResponse, errors::ConnectorError> {
+ self.build_error_response(res)
+ }
+```
+**ConnectorCommonExt :** An enhanced trait for `ConnectorCommon` that enables functions with a generic type. This trait includes the `build_headers` method, responsible for constructing both the common headers and the Authorization headers (retrieved from the `get_auth_header` method), returning them as a vector.
+
+```rust
+ where
+ Self: ConnectorIntegration<Flow, Request, Response>,
+{
+ fn build_headers(
+ &self,
+ req: &types::RouterData<Flow, Request, Response>,
+ _connectors: &settings::Connectors,
+ ) -> CustomResult<Vec<(String, request::Maskable<String>)>, errors::ConnectorError> {
+ let mut header = vec![(
+ headers::CONTENT_TYPE.to_string(),
+ self.get_content_type().to_string().into(),
+ )];
+ let mut api_key = self.get_auth_header(&req.connector_auth_type)?;
+ header.append(&mut api_key);
+ Ok(header)
+ }
+}
+```
+
+**Payment :** This trait includes several other traits and is meant to represent the functionality related to payments.
+
+**PaymentAuthorize :** This trait extends the `api::ConnectorIntegration `trait with specific types related to payment authorization.
+
+**PaymentCapture :** This trait extends the `api::ConnectorIntegration `trait with specific types related to manual payment capture.
+
+**PaymentSync :** This trait extends the `api::ConnectorIntegration `trait with specific types related to payment retrieve.
+
+**Refund :** This trait includes several other traits and is meant to represent the functionality related to Refunds.
+
+**RefundExecute :** This trait extends the `api::ConnectorIntegration `trait with specific types related to refunds create.
+
+**RefundSync :** This trait extends the `api::ConnectorIntegration `trait with specific types related to refunds retrieve.
+
And the below derive traits
@@ -277,13 +625,105 @@ And the below derive traits
There is a trait bound to implement refunds, if you don't want to implement refunds you can mark them as `todo!()` but code panics when you initiate refunds then.
-Don’t forget to add logs lines in appropriate places.
Refer to other connector code for trait implementations. Mostly the rust compiler will guide you to do it easily.
Feel free to connect with us in case of any queries and if you want to confirm the status mapping.
+### **Set the currency Unit**
+The `get_currency_unit` function, part of the ConnectorCommon trait, enables connectors to specify their accepted currency unit as either `Base` or `Minor`. For instance, Paypal designates its currency in the base unit (for example, USD), whereas Hyperswitch processes amounts in the minor unit (for example, cents). If a connector accepts amounts in the base unit, conversion is required, as illustrated.
+
+``` rust
+impl<T>
+ TryFrom<(
+ &types::api::CurrencyUnit,
+ types::storage::enums::Currency,
+ i64,
+ T,
+ )> for PaypalRouterData<T>
+{
+ type Error = error_stack::Report<errors::ConnectorError>;
+ fn try_from(
+ (currency_unit, currency, amount, item): (
+ &types::api::CurrencyUnit,
+ types::storage::enums::Currency,
+ i64,
+ T,
+ ),
+ ) -> Result<Self, Self::Error> {
+ let amount = utils::get_amount_as_string(currency_unit, amount, currency)?;
+ Ok(Self {
+ amount,
+ router_data: item,
+ })
+ }
+}
+```
+
+**Note:** Since the amount is being converted in the aforementioned `try_from`, it is necessary to retrieve amounts from `ConnectorRouterData` in all other `try_from` instances.
+
+### **Connector utility functions**
+
+In the `connector/utils.rs` file, you'll discover utility functions that aid in constructing connector requests and responses. We highly recommend using these helper functions for retrieving payment request fields, such as `get_billing_country`, `get_browser_info`, and `get_expiry_date_as_yyyymm`, as well as for validations, including `is_three_ds`, `is_auto_capture`, and more.
+
+```rust
+ let json_wallet_data: CheckoutGooglePayData = wallet_data.get_wallet_token_as_json()?;
+```
+
### **Test the connector**
-Try running the tests in `crates/router/tests/connectors/{{connector-name}}.rs`.
+The template code script generates a test file for the connector, containing 20 sanity tests. We anticipate that you will implement these tests when adding a new connector.
+
+```rust
+// Cards Positive Tests
+// Creates a payment using the manual capture flow (Non 3DS).
+#[serial_test::serial]
+#[actix_web::test]
+async fn should_only_authorize_payment() {
+ let response = CONNECTOR
+ .authorize_payment(payment_method_details(), get_default_payment_info())
+ .await
+ .expect("Authorize payment response");
+ assert_eq!(response.status, enums::AttemptStatus::Authorized);
+}
+```
+
+Utility functions for tests are also available at `tests/connector/utils`. These functions enable you to write tests with ease.
+
+```rust
+ /// For initiating payments when `CaptureMethod` is set to `Manual`
+ /// This doesn't complete the transaction, `PaymentsCapture` needs to be done manually
+ async fn authorize_payment(
+ &self,
+ payment_data: Option<types::PaymentsAuthorizeData>,
+ payment_info: Option<PaymentInfo>,
+ ) -> Result<types::PaymentsAuthorizeRouterData, Report<ConnectorError>> {
+ let integration = self.get_data().connector.get_connector_integration();
+ let mut request = self.generate_data(
+ types::PaymentsAuthorizeData {
+ confirm: true,
+ capture_method: Some(diesel_models::enums::CaptureMethod::Manual),
+ ..(payment_data.unwrap_or(PaymentAuthorizeType::default().0))
+ },
+ payment_info,
+ );
+ let tx: oneshot::Sender<()> = oneshot::channel().0;
+ let state = routes::AppState::with_storage(
+ Settings::new().unwrap(),
+ StorageImpl::PostgresqlTest,
+ tx,
+ Box::new(services::MockApiClient),
+ )
+ .await;
+ integration.execute_pretasks(&mut request, &state).await?;
+ Box::pin(call_connector(request, integration)).await
+ }
+```
+
+Prior to executing tests in the shell, ensure that the API keys are configured in `crates/router/tests/connectors/sample_auth.toml` and set the environment variable `CONNECTOR_AUTH_FILE_PATH` using the export command. Avoid pushing code with exposed API keys.
+
+```rust
+ export CONNECTOR_AUTH_FILE_PATH="/hyperswitch/crates/router/tests/connectors/sample_auth.toml"
+ cargo test --package router --test connectors -- checkout --test-threads=1
+```
All tests should pass and add appropriate tests for connector specific payment flows.
### **Build payment request and response from json schema**
diff --git a/config/config.example.toml b/config/config.example.toml
index 02eff1d42979..87999f0e9e93 100644
--- a/config/config.example.toml
+++ b/config/config.example.toml
@@ -21,37 +21,52 @@ idle_pool_connection_timeout = 90 # Timeout for idle pool connections (defaults
# Main SQL data store credentials
[master_database]
-username = "db_user" # DB Username
-password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
-host = "localhost" # DB Host
-port = 5432 # DB Port
-dbname = "hyperswitch_db" # Name of Database
-pool_size = 5 # Number of connections to keep open
-connection_timeout = 10 # Timeout for database connection in seconds
-queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
+username = "db_user" # DB Username
+password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
+host = "localhost" # DB Host
+port = 5432 # DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
# Replica SQL data store credentials
[replica_database]
-username = "replica_user" # DB Username
-password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
-host = "localhost" # DB Host
-port = 5432 # DB Port
-dbname = "hyperswitch_db" # Name of Database
-pool_size = 5 # Number of connections to keep open
-connection_timeout = 10 # Timeout for database connection in seconds
-queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
+username = "replica_user" # DB Username
+password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
+host = "localhost" # DB Host
+port = 5432 # DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
# Redis credentials
[redis]
host = "127.0.0.1"
port = 6379
-pool_size = 5 # Number of connections to keep open
-reconnect_max_attempts = 5 # Maximum number of reconnection attempts to make before failing. Set to 0 to retry forever.
-reconnect_delay = 5 # Delay between reconnection attempts, in milliseconds
-default_ttl = 300 # Default TTL for entries, in seconds
-default_hash_ttl = 900 # Default TTL for hashes entries, in seconds
-use_legacy_version = false # Resp protocol for fred crate (set this to true if using RESPv2 or redis version < 6)
-stream_read_count = 1 # Default number of entries to read from stream if not provided in stream read options
+pool_size = 5 # Number of connections to keep open
+reconnect_max_attempts = 5 # Maximum number of reconnection attempts to make before failing. Set to 0 to retry forever.
+reconnect_delay = 5 # Delay between reconnection attempts, in milliseconds
+default_ttl = 300 # Default TTL for entries, in seconds
+default_hash_ttl = 900 # Default TTL for hashes entries, in seconds
+use_legacy_version = false # Resp protocol for fred crate (set this to true if using RESPv2 or redis version < 6)
+stream_read_count = 1 # Default number of entries to read from stream if not provided in stream read options
+auto_pipeline = true # Whether or not the client should automatically pipeline commands across tasks when possible.
+disable_auto_backpressure = false # Whether or not to disable the automatic backpressure features when pipelining is enabled.
+max_in_flight_commands = 5000 # The maximum number of in-flight commands (per connection) before backpressure will be applied.
+default_command_timeout = 0 # An optional timeout to apply to all commands.
+max_feed_count = 200 # The maximum number of frames that will be fed to a socket before flushing.
+
+# This section provides configs for currency conversion api
+[forex_api]
+call_delay = 21600 # Api calls are made after every 6 hrs
+local_fetch_retry_count = 5 # Fetch from Local cache has retry count as 5
+local_fetch_retry_delay = 1000 # Retry delay for checking write condition
+api_timeout = 20000 # Api timeouts once it crosses 20000 ms
+api_key = "YOUR API KEY HERE" # Api key for making request to foreign exchange Api
+fallback_api_key = "YOUR API KEY" # Api key for the fallback service
+redis_lock_timeout = 26000 # Redis remains write locked for 26000 ms once the acquire_redis_lock is called
# Logging configuration. Logging can be either to file or console or both.
@@ -95,27 +110,27 @@ sampling_rate = 0.1 # decimal rate between 0.0
otel_exporter_otlp_endpoint = "http://localhost:4317" # endpoint to send metrics and traces to, can include port number
otel_exporter_otlp_timeout = 5000 # timeout (in milliseconds) for sending metrics and traces
use_xray_generator = false # Set this to true for AWS X-ray compatible traces
-route_to_trace = [ "*/confirm" ]
+route_to_trace = ["*/confirm"]
# This section provides some secret values.
[secrets]
-master_enc_key = "sample_key" # Master Encryption key used to encrypt merchant wise encryption key. Should be 32-byte long.
-admin_api_key = "test_admin" # admin API key for admin authentication. Only applicable when KMS is disabled.
-kms_encrypted_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the admin_api_key. Only applicable when KMS is enabled.
-jwt_secret = "secret" # JWT secret used for user authentication. Only applicable when KMS is disabled.
-kms_encrypted_jwt_secret = "" # Base64-encoded (KMS encrypted) ciphertext of the jwt_secret. Only applicable when KMS is enabled.
-recon_admin_api_key = "recon_test_admin" # recon_admin API key for recon authentication. Only applicable when KMS is disabled.
-kms_encrypted_recon_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the recon_admin_api_key. Only applicable when KMS is enabled
+master_enc_key = "sample_key" # Master Encryption key used to encrypt merchant wise encryption key. Should be 32-byte long.
+admin_api_key = "test_admin" # admin API key for admin authentication. Only applicable when KMS is disabled.
+kms_encrypted_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the admin_api_key. Only applicable when KMS is enabled.
+jwt_secret = "secret" # JWT secret used for user authentication. Only applicable when KMS is disabled.
+kms_encrypted_jwt_secret = "" # Base64-encoded (KMS encrypted) ciphertext of the jwt_secret. Only applicable when KMS is enabled.
+recon_admin_api_key = "recon_test_admin" # recon_admin API key for recon authentication. Only applicable when KMS is disabled.
+kms_encrypted_recon_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the recon_admin_api_key. Only applicable when KMS is enabled
# Locker settings contain details for accessing a card locker, a
# PCI Compliant storage entity which stores payment method information
# like card details
[locker]
-host = "" # Locker host
-host_rs = "" # Rust Locker host
-mock_locker = true # Emulate a locker locally using Postgres
-basilisk_host = "" # Basilisk host
-locker_signing_key_id = "1" # Key_id to sign basilisk hs locker
+host = "" # Locker host
+host_rs = "" # Rust Locker host
+mock_locker = true # Emulate a locker locally using Postgres
+locker_signing_key_id = "1" # Key_id to sign basilisk hs locker
+locker_enabled = true # Boolean to enable or disable saving cards in locker
[delayed_session_response]
connectors_with_delayed_session_response = "trustpay,payme" # List of connectors which has delayed session response
@@ -124,16 +139,9 @@ connectors_with_delayed_session_response = "trustpay,payme" # List of connectors
connectors_with_webhook_source_verification_call = "paypal" # List of connectors which has additional source verification api-call
[jwekey] # 4 priv/pub key pair
-locker_key_identifier1 = "" # key identifier for key rotation , should be same as basilisk
-locker_key_identifier2 = "" # key identifier for key rotation , should be same as basilisk
-locker_encryption_key1 = "" # public key 1 in pem format, corresponding private key in basilisk
-locker_encryption_key2 = "" # public key 2 in pem format, corresponding private key in basilisk
-locker_decryption_key1 = "" # private key 1 in pem format, corresponding public key in basilisk
-locker_decryption_key2 = "" # private key 2 in pem format, corresponding public key in basilisk
-vault_encryption_key = "" # public key in pem format, corresponding private key in basilisk-hs
+vault_encryption_key = "" # public key in pem format, corresponding private key in basilisk-hs
rust_locker_encryption_key = "" # public key in pem format, corresponding private key in rust locker
-vault_private_key = "" # private key in pem format, corresponding public key in basilisk-hs
-
+vault_private_key = "" # private key in pem format, corresponding public key in basilisk-hs
# Refund configuration
[refund]
@@ -201,10 +209,13 @@ payeezy.base_url = "https://api-cert.payeezy.com/"
payme.base_url = "https://sandbox.payme.io/"
paypal.base_url = "https://api-m.sandbox.paypal.com/"
payu.base_url = "https://secure.snd.payu.com/"
+placetopay.base_url = "https://test.placetopay.com/rest/gateway"
powertranz.base_url = "https://staging.ptranz.com/api/"
prophetpay.base_url = "https://ccm-thirdparty.cps.golf/"
rapyd.base_url = "https://sandboxapi.rapyd.net"
+riskified.base_url = "https://sandbox.riskified.com/api"
shift4.base_url = "https://api.shift4.com/"
+signifyd.base_url = "https://api.signifyd.com/"
square.base_url = "https://connect.squareupsandbox.com/"
square.secondary_base_url = "https://pci-connect.squareupsandbox.com/"
stax.base_url = "https://apiprod.fattlabs.com/"
@@ -234,11 +245,11 @@ adyen = { banks = "e_platby_vub,postova_banka,sporo_pay,tatra_pay,viamo" }
# Bank redirect configs for allowed banks through online_banking_poland payment method
[bank_config.online_banking_poland]
-adyen = { banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24"}
+adyen = { banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24" }
# Bank redirect configs for allowed banks through open_banking_uk payment method
[bank_config.open_banking_uk]
-adyen = { banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled"}
+adyen = { banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled" }
# Bank redirect configs for allowed banks through przelewy24 payment method
[bank_config.przelewy24]
@@ -287,6 +298,12 @@ lower_fetch_limit = 1800 # Lower limit for fetching entries from redis
lock_key = "PRODUCER_LOCKING_KEY" # The following keys defines the producer lock that is created in redis with
lock_ttl = 160 # the ttl being the expiry (in seconds)
+# Scheduler server configuration
+[scheduler.server]
+port = 3000 # Port on which the server will listen for incoming requests
+host = "127.0.0.1" # Host IP address to bind the server to
+workers = 1 # Number of actix workers to handle incoming requests concurrently
+
batch_size = 200 # Specifies the batch size the producer will push under a single entry in the redis queue
# Drainer configuration, which handles draining raw SQL queries from Redis streams to the SQL database
@@ -312,90 +329,108 @@ region = "" # The AWS region used by the KMS SDK for decrypting data.
# EmailClient configuration. Only applicable when the `email` feature flag is enabled.
[email]
-from_email = "notify@example.com" # Sender email
-aws_region = "" # AWS region used by AWS SES
-base_url = "" # Base url used when adding links that should redirect to self
+sender_email = "example@example.com" # Sender email
+aws_region = "" # AWS region used by AWS SES
+base_url = "" # Base url used when adding links that should redirect to self
+allowed_unverified_days = 1 # Number of days the api calls ( with jwt token ) can be made without verifying the email
+active_email_client = "SES" # The currently active email client
+
+# Configuration for aws ses, applicable when the active email client is SES
+[email.aws_ses]
+email_role_arn = "" # The amazon resource name ( arn ) of the role which has permission to send emails
+sts_role_session_name = "" # An identifier for the assumed role session, used to uniquely identify a session.
#tokenization configuration which describe token lifetime and payment method for specific connector
[tokenization]
stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { type = "disable_only", list = "google_pay" } }
-checkout = { long_lived_token = false, payment_method = "wallet" }
-mollie = {long_lived_token = false, payment_method = "card"}
+checkout = { long_lived_token = false, payment_method = "wallet", apple_pay_pre_decrypt_flow = "network_tokenization" }
+mollie = { long_lived_token = false, payment_method = "card" }
stax = { long_lived_token = true, payment_method = "card,bank_debit" }
-square = {long_lived_token = false, payment_method = "card"}
+square = { long_lived_token = false, payment_method = "card" }
braintree = { long_lived_token = false, payment_method = "card" }
-gocardless = {long_lived_token = true, payment_method = "bank_debit"}
+gocardless = { long_lived_token = true, payment_method = "bank_debit" }
[temp_locker_enable_config]
-stripe = {payment_method = "bank_transfer"}
-nuvei = {payment_method = "card"}
-shift4 = {payment_method = "card"}
-bluesnap = {payment_method = "card"}
+stripe = { payment_method = "bank_transfer" }
+nuvei = { payment_method = "card" }
+shift4 = { payment_method = "card" }
+bluesnap = { payment_method = "card" }
+bankofamerica = { payment_method = "card" }
+cybersource = { payment_method = "card" }
+nmi = { payment_method = "card" }
+payme = { payment_method = "card" }
[dummy_connector]
-enabled = true # Whether dummy connector is enabled or not
-payment_ttl = 172800 # Time to live for dummy connector payment in redis
-payment_duration = 1000 # Fake delay duration for dummy connector payment
-payment_tolerance = 100 # Fake delay tolerance for dummy connector payment
-payment_retrieve_duration = 500 # Fake delay duration for dummy connector payment sync
-payment_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector payment sync
-payment_complete_duration = 500 # Fake delay duration for dummy connector payment complete
-payment_complete_tolerance = 100 # Fake delay tolerance for dummy connector payment complete
-refund_ttl = 172800 # Time to live for dummy connector refund in redis
-refund_duration = 1000 # Fake delay duration for dummy connector refund
-refund_tolerance = 100 # Fake delay tolerance for dummy connector refund
-refund_retrieve_duration = 500 # Fake delay duration for dummy connector refund sync
-refund_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector refund sync
-authorize_ttl = 36000 # Time to live for dummy connector authorize request in redis
+enabled = true # Whether dummy connector is enabled or not
+payment_ttl = 172800 # Time to live for dummy connector payment in redis
+payment_duration = 1000 # Fake delay duration for dummy connector payment
+payment_tolerance = 100 # Fake delay tolerance for dummy connector payment
+payment_retrieve_duration = 500 # Fake delay duration for dummy connector payment sync
+payment_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector payment sync
+payment_complete_duration = 500 # Fake delay duration for dummy connector payment complete
+payment_complete_tolerance = 100 # Fake delay tolerance for dummy connector payment complete
+refund_ttl = 172800 # Time to live for dummy connector refund in redis
+refund_duration = 1000 # Fake delay duration for dummy connector refund
+refund_tolerance = 100 # Fake delay tolerance for dummy connector refund
+refund_retrieve_duration = 500 # Fake delay duration for dummy connector refund sync
+refund_retrieve_tolerance = 100 # Fake delay tolerance for dummy connector refund sync
+authorize_ttl = 36000 # Time to live for dummy connector authorize request in redis
assets_base_url = "https://www.example.com/" # Base url for dummy connector assets
default_return_url = "https://www.example.com/" # Default return url when no return url is passed while payment
slack_invite_url = "https://www.example.com/" # Slack invite url for hyperswitch
discord_invite_url = "https://www.example.com/" # Discord invite url for hyperswitch
[mandates.supported_payment_methods]
-card.credit = {connector_list = "stripe,adyen"} # Mandate supported payment method type and connector for card
-wallet.paypal = {connector_list = "adyen"} # Mandate supported payment method type and connector for wallets
-pay_later.klarna = {connector_list = "adyen"} # Mandate supported payment method type and connector for pay_later
-bank_debit.ach = { connector_list = "gocardless"} # Mandate supported payment method type and connector for bank_debit
-bank_debit.becs = { connector_list = "gocardless"} # Mandate supported payment method type and connector for bank_debit
-bank_debit.sepa = { connector_list = "gocardless"} # Mandate supported payment method type and connector for bank_debit
+card.credit = { connector_list = "stripe,adyen,cybersource" } # Mandate supported payment method type and connector for card
+wallet.paypal = { connector_list = "adyen" } # Mandate supported payment method type and connector for wallets
+pay_later.klarna = { connector_list = "adyen" } # Mandate supported payment method type and connector for pay_later
+bank_debit.ach = { connector_list = "gocardless" } # Mandate supported payment method type and connector for bank_debit
+bank_debit.becs = { connector_list = "gocardless" } # Mandate supported payment method type and connector for bank_debit
+bank_debit.sepa = { connector_list = "gocardless" } # Mandate supported payment method type and connector for bank_debit
+bank_redirect.ideal = { connector_list = "stripe,adyen,globalpay" } # Mandate supported payment method type and connector for bank_redirect
+bank_redirect.sofort = { connector_list = "stripe,adyen,globalpay" }
+wallet.apple_pay = { connector_list = "stripe,adyen,cybersource,noon" }
+
# Required fields info used while listing the payment_method_data
[required_fields.pay_later] # payment_method = "pay_later"
-afterpay_clearpay = {fields = {stripe = [ # payment_method_type = afterpay_clearpay, connector = "stripe"
- # Required fields vector with its respective display name in front-end and field_type
- { required_field = "shipping.address.first_name", display_name = "first_name", field_type = "text" },
- { required_field = "shipping.address.last_name", display_name = "last_name", field_type = "text" },
- { required_field = "shipping.address.country", display_name = "country", field_type = { drop_down = { options = [ "US", "IN" ] } } },
- ] } }
+afterpay_clearpay = { fields = { stripe = [ # payment_method_type = afterpay_clearpay, connector = "stripe"
+ # Required fields vector with its respective display name in front-end and field_type
+ { required_field = "shipping.address.first_name", display_name = "first_name", field_type = "text" },
+ { required_field = "shipping.address.last_name", display_name = "last_name", field_type = "text" },
+ { required_field = "shipping.address.country", display_name = "country", field_type = { drop_down = { options = [
+ "US",
+ "IN",
+ ] } } },
+] } }
[payouts]
-payout_eligibility = true # Defaults the eligibility of a payout method to true in case connector does not provide checks for payout eligibility
+payout_eligibility = true # Defaults the eligibility of a payout method to true in case connector does not provide checks for payout eligibility
[pm_filters.adyen]
-online_banking_fpx = {country = "MY", currency = "MYR"}
-online_banking_thailand = {country = "TH", currency = "THB"}
-touch_n_go = {country = "MY", currency = "MYR"}
-atome = {country = "MY,SG", currency = "MYR,SGD"}
-swish = {country = "SE", currency = "SEK"}
-permata_bank_transfer = {country = "ID", currency = "IDR"}
-bca_bank_transfer = {country = "ID", currency = "IDR"}
-bni_va = {country = "ID", currency = "IDR"}
-bri_va = {country = "ID", currency = "IDR"}
-cimb_va = {country = "ID", currency = "IDR"}
-danamon_va = {country = "ID", currency = "IDR"}
-mandiri_va = {country = "ID", currency = "IDR"}
-alfamart = {country = "ID", currency = "IDR"}
-indomaret = {country = "ID", currency = "IDR"}
-open_banking_uk = {country = "GB", currency = "GBP"}
-oxxo = {country = "MX", currency = "MXN"}
-pay_safe_card = {country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,UAE,UK,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,ISK,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU"}
-seven_eleven = {country = "JP", currency = "JPY"}
-lawson = {country = "JP", currency = "JPY"}
-mini_stop = {country = "JP", currency = "JPY"}
-family_mart = {country = "JP", currency = "JPY"}
-seicomart = {country = "JP", currency = "JPY"}
-pay_easy = {country = "JP", currency = "JPY"}
+online_banking_fpx = { country = "MY", currency = "MYR" }
+online_banking_thailand = { country = "TH", currency = "THB" }
+touch_n_go = { country = "MY", currency = "MYR" }
+atome = { country = "MY,SG", currency = "MYR,SGD" }
+swish = { country = "SE", currency = "SEK" }
+permata_bank_transfer = { country = "ID", currency = "IDR" }
+bca_bank_transfer = { country = "ID", currency = "IDR" }
+bni_va = { country = "ID", currency = "IDR" }
+bri_va = { country = "ID", currency = "IDR" }
+cimb_va = { country = "ID", currency = "IDR" }
+danamon_va = { country = "ID", currency = "IDR" }
+mandiri_va = { country = "ID", currency = "IDR" }
+alfamart = { country = "ID", currency = "IDR" }
+indomaret = { country = "ID", currency = "IDR" }
+open_banking_uk = { country = "GB", currency = "GBP" }
+oxxo = { country = "MX", currency = "MXN" }
+pay_safe_card = { country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,AE,GB,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU" }
+seven_eleven = { country = "JP", currency = "JPY" }
+lawson = { country = "JP", currency = "JPY" }
+mini_stop = { country = "JP", currency = "JPY" }
+family_mart = { country = "JP", currency = "JPY" }
+seicomart = { country = "JP", currency = "JPY" }
+pay_easy = { country = "JP", currency = "JPY" }
[pm_filters.zen]
credit = { not_available_flows = { capture_method = "manual" } }
@@ -414,18 +449,19 @@ credit = { currency = "USD" }
debit = { currency = "USD" }
ach = { currency = "USD" }
-[pm_filters.stripe]
-cashapp = {country = "US", currency = "USD"}
-
[pm_filters.prophetpay]
card_redirect = { currency = "USD" }
+[pm_filters.helcim]
+credit = { currency = "USD" }
+debit = { currency = "USD" }
+
[connector_customer]
connector_list = "gocardless,stax,stripe"
payout_connector_list = "wise"
[bank_config.online_banking_fpx]
-adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,may_bank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
+adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,maybank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
[bank_config.online_banking_thailand]
adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_bank,kasikorn_bank"
@@ -434,18 +470,45 @@ adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_ba
supported_connectors = "braintree"
[applepay_decrypt_keys]
-apple_pay_ppc = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE" #Payment Processing Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Payment Processing Certificate
-apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" #Private key generate by Elliptic-curve prime256v1 curve
-apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE" #Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
-apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY" #Private key generate by RSA:2048 algorithm
+apple_pay_ppc = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE" # Payment Processing Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Payment Processing Certificate
+apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" # Private key generated by Elliptic-curve prime256v1 curve. You can use `openssl ecparam -out private.key -name prime256v1 -genkey` to generate the private key
+apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE" # Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
+apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY" # Private key generated by RSA:2048 algorithm. Refer Hyperswitch Docs (https://docs.hyperswitch.io/hyperswitch-cloud/payment-methods-setup/wallets/apple-pay/ios-application/) to generate the private key
+
+[applepay_merchant_configs]
+# Run below command to get common merchant identifier for applepay in shell
+#
+# CERT_PATH="path/to/certificate.pem"
+# MERCHANT_ID=$(openssl x509 -in "$CERT_PATH" -noout -text |
+# awk -v oid="1.2.840.113635.100.6.32" '
+# BEGIN { RS = "\n\n" }
+# /X509v3 extensions/ { in_extension=1 }
+# in_extension && /'"$oid"'/ { print $0; exit }' |
+# grep -oE '\.@[A-F0-9]+' | sed 's/^\.@//'
+# )
+# echo "Merchant ID: $MERCHANT_ID"
+common_merchant_identifier = "APPLE_PAY_COMMON_MERCHANT_IDENTIFIER" # This can be obtained by extracting the merchant identifier from the certificate as shown in the comments above
+merchant_cert = "APPLE_PAY_MERCHANT_CERTIFICATE" # Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
+merchant_cert_key = "APPLE_PAY_MERCHANT_CERTIFICATE_KEY" # Private key generated by RSA:2048 algorithm. Refer Hyperswitch Docs (https://docs.hyperswitch.io/hyperswitch-cloud/payment-methods-setup/wallets/apple-pay/ios-application/) to generate the private key
+applepay_endpoint = "https://apple-pay-gateway.apple.com/paymentservices/registerMerchant" # Apple pay gateway merchant endpoint
[payment_link]
-sdk_url = "http://localhost:9090/dist/HyperLoader.js"
+sdk_url = "http://localhost:9090/0.16.7/v0/HyperLoader.js"
+
+[payment_method_auth]
+redis_expiry = 900
+pm_auth_key = "Some_pm_auth_key"
# Analytics configuration.
[analytics]
source = "sqlx" # The Analytics source/strategy to be used
+[analytics.clickhouse]
+username = "" # Clickhouse username
+password = "" # Clickhouse password (optional)
+host = "" # Clickhouse host in http(s)://<host>:<port> format
+database_name = "" # Clickhouse database name
+
[analytics.sqlx]
username = "db_user" # Analytics DB Username
password = "db_pass" # Analytics DB Password
@@ -454,8 +517,38 @@ port = 5432 # Analytics DB Port
dbname = "hyperswitch_db" # Name of Database
pool_size = 5 # Number of connections to keep open
connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
# Config for KV setup
[kv_config]
# TTL for KV in seconds
ttl = 900
+
+[frm]
+enabled = true
+
+[paypal_onboarding]
+client_id = "paypal_client_id" # Client ID for PayPal onboarding
+client_secret = "paypal_secret_key" # Secret key for PayPal onboarding
+partner_id = "paypal_partner_id" # Partner ID for PayPal onboarding
+enabled = true # Switch to enable or disable PayPal onboarding
+
+[events]
+source = "logs" # The event sink to push events supports kafka or logs (stdout)
+
+[events.kafka]
+brokers = [] # Kafka broker urls for bootstrapping the client
+intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
+attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
+refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
+api_logs_topic = "topic" # Kafka topic to be used for incoming api events
+connector_logs_topic = "topic" # Kafka topic to be used for connector api events
+outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
+
+# File storage configuration
+[file_storage]
+file_storage_backend = "aws_s3" # File storage backend to be used
+
+[file_storage.aws_s3]
+region = "us-east-1" # The AWS region used by the AWS S3 for file storage
+bucket_name = "bucket1" # The AWS S3 bucket name for file storage
diff --git a/config/deployments/README.md b/config/deployments/README.md
new file mode 100644
index 000000000000..c807892f1e06
--- /dev/null
+++ b/config/deployments/README.md
@@ -0,0 +1,158 @@
+# Configs for deployments
+
+## Introduction
+
+This directory contains the configs for deployments of Hyperswitch in different hosted environments.
+
+Hyperswitch has **3** components namely,
+
+- router
+- drainer
+- scheduler
+ - consumer
+ - producer
+
+We maintain configs for the `router` component for 3 different environments, namely,
+
+- Integration Test
+- Sandbox
+- Production
+
+To learn about what "router", "drainer" and "scheduler" is, please refer to the [Hyperswitch architecture][architecture] documentation.
+
+### Tree structure
+
+```text
+config/deployments # Root directory for the deployment configs
+├── README.md # This file
+├── drainer.toml # Config specific to drainer
+├── env_specific.toml # Config for environment specific values which are meant to be sensitive (to be set by the user)
+├── integration_test.toml # Config specific to integration_test environment
+├── production.toml # Config specific to production environment
+├── sandbox.toml # Config specific to sandbox environment
+└── scheduler # Directory for scheduler configs
+ ├── consumer.toml # Config specific to consumer
+ └── producer.toml # Config specific to producer
+```
+
+## Router
+
+The `integration_test.toml`, `sandbox.toml`, and `production.toml` files are configuration files for the environments `integration_test`, `sandbox`, and `production`, respectively. These files maintain a 1:1 mapping with the environment names, and it is recommended to use the same name for the environment throughout this document.
+
+### Generating a Config File for the Router
+
+The `env_specific.toml` file contains values that are specific to the environment. This file is kept separate because the values in it are sensitive and are meant to be set by the user. The `env_specific.toml` file is merged with the `integration_test.toml`, `sandbox.toml`, or `production.toml` file to create the final configuration file for the router.
+
+For example, to build and deploy Hyperswitch in the **sandbox environment**, you can duplicate the `env_specific.toml` file and rename it as `sandbox_config.toml`. Then, update the values in the file with the proper values for the sandbox environment.
+
+The environment-specific `sandbox.toml` file, which contains the Hyperswitch recommended defaults, is merged with the `sandbox_config.toml` file to create the final configuration file called `sandbox_release.toml`. This file is marked as ready for deploying on the sandbox environment.
+
+1. Duplicate the `env_specific.toml` file and rename it as `sandbox_config.toml`:
+
+ ```shell
+ cp config/deployments/env_specific.toml config/deployments/sandbox_config.toml
+ ```
+
+2. Update the values in the `sandbox_config.toml` file with the proper values for the sandbox environment:
+
+ ```shell
+ vi config/deployments/sandbox_config.toml
+ ```
+
+3. To merge the files you can use `cat`:
+
+ ```shell
+ cat config/deployments/sandbox.toml config/deployments/sandbox_config.toml > config/deployments/sandbox_release.toml
+ ```
+
+> [!NOTE]
+> You can refer to the [`config.example.toml`][config_example] file to understand the variables that are used in the `env_specific.toml` file.
+
+## Scheduler
+
+The scheduler has two components, namely `consumer` and `producer`.
+
+The `consumer.toml` and `producer.toml` files are the configuration files for the `consumer` and `producer`, respectively. These files contain the default values recommended by Hyperswitch.
+
+### Generating a Config File for the Scheduler
+
+Scheduler configuration files are built on top of the router configuration files. So, the `sandbox_release.toml` file is merged with the `consumer.toml` or `producer.toml` file to create the final configuration file for the scheduler.
+
+You can use `cat` to merge the files in the terminal.
+
+- Below is an example for consumer in sandbox environment:
+
+ ```shell
+ cat config/deployments/scheduler/consumer.toml config/deployments/sandbox_release.toml > config/deployments/consumer_sandbox_release.toml
+ ```
+
+- Below is an example for producer in sandbox environment:
+
+ ```shell
+ cat config/deployments/scheduler/producer.toml config/deployments/sandbox_release.toml > config/deployments/producer_sandbox_release.toml
+ ```
+
+## Drainer
+
+Drainer is an independent component, and hence, the drainer configs can be used directly provided that the user updates the `drainer.toml` file with proper values before using.
+
+## Running Hyperswitch through Docker Compose
+
+To run the router, you can use the following snippet in the `docker-compose.yml` file:
+
+```yaml
+### Application services
+hyperswitch-server:
+ image: juspaydotin/hyperswitch-router:latest # This pulls the latest image from Docker Hub. If you wish to use a version without added features (like KMS), you can replace `latest` with `standalone`. However, please note that the standalone version is not recommended for production use.
+ command: /local/bin/router --config-path /local/config/deployments/sandbox_release.toml # <--- Change this to the config file that is generated for the environment.
+ ports:
+ - "8080:8080"
+ volumes:
+ - ./config:/local/config
+```
+
+To run the producer, you can use the following snippet in the `docker-compose.yml` file:
+
+```yaml
+hyperswitch-producer:
+ image: juspaydotin/hyperswitch-producer:latest
+ command: /local/bin/scheduler --config-path /local/config/deployments/producer_sandbox_release.toml # <--- Change this to the config file that is generated for the environment.
+ volumes:
+ - ./config:/local/config
+ environment:
+ - SCHEDULER_FLOW=producer
+```
+
+To run the consumer, you can use the following snippet in the `docker-compose.yml` file:
+
+```yaml
+hyperswitch-consumer:
+ image: juspaydotin/hyperswitch-consumer:latest
+ command: /local/bin/scheduler --config-path /local/config/deployments/consumer_sandbox_release.toml # <--- Change this to the config file that is generated for the environment
+ volumes:
+ - ./config:/local/config
+ environment:
+ - SCHEDULER_FLOW=consumer
+```
+
+To run the drainer, you can use the following snippet in the `docker-compose.yml` file:
+
+```yaml
+hyperswitch-drainer:
+ image: juspaydotin/hyperswitch-drainer:latest
+ command: /local/bin/drainer --config-path /local/config/deployments/drainer.toml
+ volumes:
+ - ./config:/local/config
+```
+
+> [!NOTE]
+> You can replace the term `sandbox` with the environment name that you are deploying to (e.g., `production`, `integration_test`, etc.) with respective changes (optional) and use the same steps to generate the final configuration file for the environment.
+
+You can verify that the server is up and running by hitting the health check endpoint:
+
+```shell
+curl --head --request GET 'http://localhost:8080/health'
+```
+
+[architecture]: /docs/architecture.md
+[config_example]: /config/config.example.toml
diff --git a/config/deployments/drainer.toml b/config/deployments/drainer.toml
new file mode 100644
index 000000000000..42c89cbfd584
--- /dev/null
+++ b/config/deployments/drainer.toml
@@ -0,0 +1,39 @@
+[drainer]
+loop_interval = 500
+max_read_count = 100
+num_partitions = 64
+shutdown_interval = 1000
+stream_name = "drainer_stream"
+
+[kms]
+key_id = "kms_key_id"
+region = "kms_region"
+
+[log.console]
+enabled = true
+level = "DEBUG"
+log_format = "json"
+
+[log.telemetry]
+metrics_enabled = true
+otel_exporter_otlp_endpoint = "http://localhost:4317"
+
+[master_database]
+dbname = "master_database_name"
+host = "localhost"
+password = "master_database_password"
+pool_size = 3
+port = 5432
+username = "username"
+
+[redis]
+cluster_enabled = false
+cluster_urls = ["redis.cluster.uri-1:8080", "redis.cluster.uri-2:4115"] # List of redis cluster urls
+default_ttl = 300
+host = "localhost"
+pool_size = 5
+port = 6379
+reconnect_delay = 5
+reconnect_max_attempts = 5
+stream_read_count = 1
+use_legacy_version = false
diff --git a/config/deployments/env_specific.toml b/config/deployments/env_specific.toml
new file mode 100644
index 000000000000..04831376050d
--- /dev/null
+++ b/config/deployments/env_specific.toml
@@ -0,0 +1,215 @@
+# For an explanation of each config, please refer to the `config/config.example.toml` file
+
+[analytics.clickhouse]
+username = "clickhouse_username" # Clickhouse username
+password = "clickhouse_password" # Clickhouse password (optional)
+host = "http://localhost:8123" # Clickhouse host in http(s)://<host>:<port> format
+database_name = "clickhouse_db_name" # Clickhouse database name
+
+# Analytics configuration.
+[analytics]
+source = "sqlx" # The Analytics source/strategy to be used
+
+[analytics.sqlx]
+username = "db_user" # Analytics DB Username
+password = "db_pass" # Analytics DB Password
+host = "localhost" # Analytics DB Host
+port = 5432 # Analytics DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Add the queue strategy used by the database bb8 client
+
+[api_keys]
+kms_encrypted_hash_key = "base64_encoded_ciphertext" # Base64-encoded (KMS encrypted) ciphertext of the API key hashing key
+hash_key = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef" # API key hashing key. Only applicable when KMS is disabled.
+
+[applepay_decrypt_keys]
+apple_pay_ppc = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE" # Payment Processing Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Payment Processing Certificate
+apple_pay_ppc_key = "APPLE_PAY_PAYMENT_PROCESSING_CERTIFICATE_KEY" # Private key generated by Elliptic-curve prime256v1 curve. You can use `openssl ecparam -out private.key -name prime256v1 -genkey` to generate the private key
+apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE" # Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
+apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY" # Private key generated by RSA:2048 algorithm. Refer Hyperswitch Docs (https://docs.hyperswitch.io/hyperswitch-cloud/payment-methods-setup/wallets/apple-pay/ios-application/) to generate the private key
+
+[applepay_merchant_configs]
+common_merchant_identifier = "APPLE_PAY_COMMON_MERCHANT_IDENTIFIER" # Refer to config.example.toml to learn how you can generate this value
+merchant_cert = "APPLE_PAY_MERCHANT_CERTIFICATE" # Merchant Certificate provided by Apple Pay (https://developer.apple.com/) Certificates, Identifiers & Profiles > Apple Pay Merchant Identity Certificate
+merchant_cert_key = "APPLE_PAY_MERCHANT_CERTIFICATE_KEY" # Private key generated by RSA:2048 algorithm. Refer Hyperswitch Docs (https://docs.hyperswitch.io/hyperswitch-cloud/payment-methods-setup/wallets/apple-pay/ios-application/) to generate the private key
+applepay_endpoint = "https://apple-pay-gateway.apple.com/paymentservices/registerMerchant" # Apple pay gateway merchant endpoint
+
+[connector_onboarding.paypal]
+enabled = true # boolean
+client_id = "paypal_client_id"
+client_secret = "paypal_client_secret"
+partner_id = "paypal_partner_id"
+
+[connector_request_reference_id_config]
+merchant_ids_send_payment_id_as_connector_request_id = ["merchant_id_1", "merchant_id_2", "etc.,"]
+
+# EmailClient configuration. Only applicable when the `email` feature flag is enabled.
+[email]
+sender_email = "example@example.com" # Sender email
+aws_region = "" # AWS region used by AWS SES
+base_url = "" # Dashboard base url used when adding links that should redirect to self, say https://app.hyperswitch.io for example
+allowed_unverified_days = 1 # Number of days the api calls ( with jwt token ) can be made without verifying the email
+active_email_client = "SES" # The currently active email client
+
+# Configuration for aws ses, applicable when the active email client is SES
+[email.aws_ses]
+email_role_arn = "" # The amazon resource name ( arn ) of the role which has permission to send emails
+sts_role_session_name = "" # An identifier for the assumed role session, used to uniquely identify a session.
+
+[events]
+source = "logs" # The event sink to push events supports kafka or logs (stdout)
+
+[events.kafka]
+brokers = [] # Kafka broker urls for bootstrapping the client
+intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
+attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
+refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
+api_logs_topic = "topic" # Kafka topic to be used for incoming api events
+connector_logs_topic = "topic" # Kafka topic to be used for connector api events
+outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
+
+# File storage configuration
+[file_storage]
+file_storage_backend = "aws_s3" # File storage backend to be used
+
+[file_storage.aws_s3]
+region = "bucket_region" # The AWS region used by AWS S3 for file storage
+bucket_name = "bucket" # The AWS S3 bucket name for file storage
+
+# This section provides configs for currency conversion api
+[forex_api]
+call_delay = 21600 # Api calls are made after every 6 hrs
+local_fetch_retry_count = 5 # Fetch from Local cache has retry count as 5
+local_fetch_retry_delay = 1000 # Retry delay for checking write condition
+api_timeout = 20000 # Api timeouts once it crosses 20000 ms
+api_key = "YOUR API KEY HERE" # Api key for making request to foreign exchange Api
+fallback_api_key = "YOUR API KEY" # Api key for the fallback service
+redis_lock_timeout = 26000 # Redis remains write locked for 26000 ms once the acquire_redis_lock is called
+
+[jwekey] # 3 priv/pub key pair
+vault_encryption_key = "" # public key in pem format, corresponding private key in rust locker
+rust_locker_encryption_key = "" # public key in pem format, corresponding private key in rust locker
+vault_private_key = "" # private key in pem format, corresponding public key in rust locker
+
+# KMS configuration. Only applicable when the `kms` feature flag is enabled.
+[kms]
+key_id = "" # The AWS key ID used by the KMS SDK for decrypting data.
+region = "" # The AWS region used by the KMS SDK for decrypting data.
+
+# Locker settings contain details for accessing a card locker, a
+# PCI Compliant storage entity which stores payment method information
+# like card details
+[locker]
+host = "" # Locker host
+host_rs = "" # Rust Locker host
+mock_locker = true # Emulate a locker locally using Postgres
+locker_signing_key_id = "1" # Key_id to sign basilisk hs locker
+locker_enabled = true # Boolean to enable or disable saving cards in locker
+redis_temp_locker_encryption_key = "redis_temp_locker_encryption_key" # Encryption key for redis temp locker
+
+[log.console]
+enabled = true
+level = "DEBUG"
+log_format = "json"
+
+[log.file]
+enabled = false
+level = "DEBUG"
+log_format = "json"
+
+# Telemetry configuration for metrics and traces
+[log.telemetry]
+traces_enabled = false # boolean [true or false], whether traces are enabled
+metrics_enabled = false # boolean [true or false], whether metrics are enabled
+ignore_errors = false # boolean [true or false], whether to ignore errors during traces or metrics pipeline setup
+sampling_rate = 0.1 # decimal rate between 0.0 - 1.0
+otel_exporter_otlp_endpoint = "http://localhost:4317" # endpoint to send metrics and traces to, can include port number
+otel_exporter_otlp_timeout = 5000 # timeout (in milliseconds) for sending metrics and traces
+use_xray_generator = false # Set this to true for AWS X-ray compatible traces
+route_to_trace = ["*/confirm"]
+
+[lock_settings]
+delay_between_retries_in_milliseconds = 500 # Delay between retries in milliseconds
+redis_lock_expiry_seconds = 180 # Seconds before the redis lock expires
+
+# Main SQL data store credentials
+[master_database]
+username = "db_user" # DB Username
+password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
+host = "localhost" # DB Host
+port = 5432 # DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Queue strategy used by the bb8 database connection pool client
+
+[payment_link]
+sdk_url = "http://localhost:9090/0.16.7/v0/HyperLoader.js"
+
+[payment_method_auth]
+pm_auth_key = "pm_auth_key" # Payment method auth key used for authorization
+redis_expiry = 900 # Redis expiry time in milliseconds
+
+[proxy]
+http_url = "http://proxy_http_url" # Outgoing proxy http URL to proxy the HTTP traffic
+https_url = "https://proxy_https_url" # Outgoing proxy https URL to proxy the HTTPS traffic
+
+# Redis credentials
+[redis]
+host = "127.0.0.1"
+port = 6379
+pool_size = 5 # Number of connections to keep open
+reconnect_max_attempts = 5 # Maximum number of reconnection attempts to make before failing. Set to 0 to retry forever.
+reconnect_delay = 5 # Delay between reconnection attempts, in milliseconds
+default_ttl = 300 # Default TTL for entries, in seconds
+default_hash_ttl = 900 # Default TTL for hashes entries, in seconds
+use_legacy_version = false # RESP protocol for fred crate (set this to true if using RESPv2 or redis version < 6)
+stream_read_count = 1 # Default number of entries to read from stream if not provided in stream read options
+auto_pipeline = true # Whether or not the client should automatically pipeline commands across tasks when possible.
+disable_auto_backpressure = false # Whether or not to disable the automatic backpressure features when pipelining is enabled.
+max_in_flight_commands = 5000 # The maximum number of in-flight commands (per connection) before backpressure will be applied.
+default_command_timeout = 0 # An optional timeout to apply to all commands.
+max_feed_count = 200 # The maximum number of frames that will be fed to a socket before flushing.
+cluster_enabled = true # boolean
+cluster_urls = ["redis.cluster.uri-1:8080", "redis.cluster.uri-2:4115"] # List of redis cluster urls
+
+# Replica SQL data store credentials
+[replica_database]
+username = "replica_user" # DB Username
+password = "db_pass" # DB Password. Use base-64 encoded kms encrypted value here when kms is enabled
+host = "localhost" # DB Host
+port = 5432 # DB Port
+dbname = "hyperswitch_db" # Name of Database
+pool_size = 5 # Number of connections to keep open
+connection_timeout = 10 # Timeout for database connection in seconds
+queue_strategy = "Fifo" # Queue strategy used by the bb8 database connection pool client
+
+[report_download_config]
+dispute_function = "report_download_config_dispute_function" # Config to download dispute report
+payment_function = "report_download_config_payment_function" # Config to download payment report
+refund_function = "report_download_config_refund_function" # Config to download refund report
+region = "report_download_config_region" # Region of the bucket
+
+# This section provides some secret values.
+[secrets]
+master_enc_key = "sample_key" # Master Encryption key used to encrypt merchant wise encryption key. Should be 32-byte long.
+admin_api_key = "test_admin" # admin API key for admin authentication. Only applicable when KMS is disabled.
+kms_encrypted_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the admin_api_key. Only applicable when KMS is enabled.
+jwt_secret = "secret" # JWT secret used for user authentication. Only applicable when KMS is disabled.
+kms_encrypted_jwt_secret = "" # Base64-encoded (KMS encrypted) ciphertext of the jwt_secret. Only applicable when KMS is enabled.
+recon_admin_api_key = "recon_test_admin" # recon_admin API key for recon authentication. Only applicable when KMS is disabled.
+kms_encrypted_recon_admin_api_key = "" # Base64-encoded (KMS encrypted) ciphertext of the recon_admin_api_key. Only applicable when KMS is enabled
+
+# Server configuration
+[server]
+base_url = "https://server_base_url"
+workers = 8
+port = 8080
+host = "127.0.0.1"
+# This is the grace time (in seconds) given to the actix-server to stop the execution
+# For more details: https://actix.rs/docs/server/#graceful-shutdown
+shutdown_timeout = 30
+# HTTP request body limit, in bytes. Defaults to 32 KiB (32_768 bytes)
+request_body_limit = 32_768
diff --git a/config/deployments/integration_test.toml b/config/deployments/integration_test.toml
new file mode 100644
index 000000000000..2c5d16e3e3c7
--- /dev/null
+++ b/config/deployments/integration_test.toml
@@ -0,0 +1,281 @@
+[bank_config]
+eps.adyen.banks = "bank_austria,bawag_psk_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_tirol_bank_ag,posojilnica_bank_e_gen,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag"
+eps.stripe.banks = "arzte_und_apotheker_bank,austrian_anadi_bank_ag,bank_austria,bankhaus_carl_spangler,bankhaus_schelhammer_und_schattera_ag,bawag_psk_ag,bks_bank_ag,brull_kallmus_bank_ag,btv_vier_lander_bank,capital_bank_grawe_gruppe_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_alpeadriabank_international_ag,hypo_noe_lb_fur_niederosterreich_u_wien,hypo_oberosterreich_salzburg_steiermark,hypo_tirol_bank_ag,hypo_vorarlberg_bank_ag,hypo_bank_burgenland_aktiengesellschaft,marchfelder_bank,oberbank_ag,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag,vr_bank_braunau"
+ideal.adyen.banks = "abn_amro,asn_bank,bunq,handelsbanken,ing,knab,moneyou,rabobank,regiobank,revolut,sns_bank,triodos_bank,van_lanschot"
+ideal.stripe.banks = "abn_amro,asn_bank,bunq,handelsbanken,ing,knab,moneyou,rabobank,regiobank,revolut,sns_bank,triodos_bank,van_lanschot"
+online_banking_czech_republic.adyen.banks = "ceska_sporitelna,komercni_banka,platnosc_online_karta_platnicza"
+online_banking_fpx.adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,maybank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
+online_banking_poland.adyen.banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24"
+online_banking_slovakia.adyen.banks = "e_platby_vub,postova_banka,sporo_pay,tatra_pay,viamo"
+online_banking_thailand.adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_bank,kasikorn_bank"
+open_banking_uk.adyen.banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled"
+przelewy24.stripe.banks = "alior_bank,bank_millennium,bank_nowy_bfg_sa,bank_pekao_sa,banki_spbdzielcze,blik,bnp_paribas,boz,citi,credit_agricole,e_transfer_pocztowy24,getin_bank,idea_bank,inteligo,mbank_mtransfer,nest_przelew,noble_pay,pbac_z_ipko,plus_bank,santander_przelew24,toyota_bank,volkswagen_bank"
+
+[connectors]
+aci.base_url = "https://eu-test.oppwa.com/"
+adyen.base_url = "https://checkout-test.adyen.com/"
+adyen.secondary_base_url = "https://pal-test.adyen.com/"
+airwallex.base_url = "https://api-demo.airwallex.com/"
+applepay.base_url = "https://apple-pay-gateway.apple.com/"
+authorizedotnet.base_url = "https://apitest.authorize.net/xml/v1/request.api"
+bambora.base_url = "https://api.na.bambora.com"
+bankofamerica.base_url = "https://apitest.merchant-services.bankofamerica.com/"
+bitpay.base_url = "https://test.bitpay.com"
+bluesnap.base_url = "https://sandbox.bluesnap.com/"
+bluesnap.secondary_base_url = "https://sandpay.bluesnap.com/"
+boku.base_url = "https://$-api4-stage.boku.com"
+braintree.base_url = "https://api.sandbox.braintreegateway.com/"
+braintree.secondary_base_url = "https://payments.sandbox.braintree-api.com/graphql"
+cashtocode.base_url = "https://cluster05.api-test.cashtocode.com"
+checkout.base_url = "https://api.sandbox.checkout.com/"
+coinbase.base_url = "https://api.commerce.coinbase.com"
+cryptopay.base_url = "https://business-sandbox.cryptopay.me"
+cybersource.base_url = "https://apitest.cybersource.com/"
+dlocal.base_url = "https://sandbox.dlocal.com/"
+dummyconnector.base_url = "http://localhost:8080/dummy-connector"
+fiserv.base_url = "https://cert.api.fiservapps.com/"
+forte.base_url = "https://sandbox.forte.net/api/v3"
+globalpay.base_url = "https://apis.sandbox.globalpay.com/ucp/"
+globepay.base_url = "https://pay.globepay.co/"
+gocardless.base_url = "https://api-sandbox.gocardless.com"
+helcim.base_url = "https://api.helcim.com/"
+iatapay.base_url = "https://sandbox.iata-pay.iata.org/api/v1"
+klarna.base_url = "https://api-na.playground.klarna.com/"
+mollie.base_url = "https://api.mollie.com/v2/"
+mollie.secondary_base_url = "https://api.cc.mollie.com/v1/"
+multisafepay.base_url = "https://testapi.multisafepay.com/"
+nexinets.base_url = "https://apitest.payengine.de/v1"
+nmi.base_url = "https://secure.nmi.com/"
+noon.base_url = "https://api-test.noonpayments.com/"
+noon.key_mode = "Test"
+nuvei.base_url = "https://ppp-test.nuvei.com/"
+opayo.base_url = "https://pi-test.sagepay.com/"
+opennode.base_url = "https://dev-api.opennode.com"
+payeezy.base_url = "https://api-cert.payeezy.com/"
+payme.base_url = "https://sandbox.payme.io/"
+paypal.base_url = "https://api-m.sandbox.paypal.com/"
+payu.base_url = "https://secure.snd.payu.com/"
+placetopay.base_url = "https://test.placetopay.com/rest/gateway"
+powertranz.base_url = "https://staging.ptranz.com/api/"
+prophetpay.base_url = "https://ccm-thirdparty.cps.golf/"
+rapyd.base_url = "https://sandboxapi.rapyd.net"
+shift4.base_url = "https://api.shift4.com/"
+signifyd.base_url = "https://api.signifyd.com/"
+riskified.base_url = "https://sandbox.riskified.com/api"
+square.base_url = "https://connect.squareupsandbox.com/"
+square.secondary_base_url = "https://pci-connect.squareupsandbox.com/"
+stax.base_url = "https://apiprod.fattlabs.com/"
+stripe.base_url = "https://api.stripe.com/"
+stripe.base_url_file_upload = "https://files.stripe.com/"
+trustpay.base_url = "https://test-tpgw.trustpay.eu/"
+trustpay.base_url_bank_redirects = "https://aapi.trustpay.eu/"
+tsys.base_url = "https://stagegw.transnox.com/"
+volt.base_url = "https://api.sandbox.volt.io/"
+wise.base_url = "https://api.sandbox.transferwise.tech/"
+worldline.base_url = "https://eu.sandbox.api-ingenico.com/"
+worldpay.base_url = "https://try.access.worldpay.com/"
+zen.base_url = "https://api.zen-test.com/"
+zen.secondary_base_url = "https://secure.zen-test.com/"
+
+[dummy_connector]
+enabled = true
+assets_base_url = "https://app.hyperswitch.io/assets/TestProcessor/"
+authorize_ttl = 36000
+default_return_url = "https://app.hyperswitch.io/"
+discord_invite_url = "https://discord.gg/wJZ7DVW8mm"
+payment_complete_duration = 500
+payment_complete_tolerance = 100
+payment_duration = 1000
+payment_retrieve_duration = 500
+payment_retrieve_tolerance = 100
+payment_tolerance = 100
+payment_ttl = 172800
+refund_duration = 1000
+refund_retrieve_duration = 500
+refund_retrieve_tolerance = 100
+refund_tolerance = 100
+refund_ttl = 172800
+slack_invite_url = "https://join.slack.com/t/hyperswitch-io/shared_invite/zt-1k6cz4lee-SAJzhz6bjmpp4jZCDOtOIg"
+
+[frm]
+enabled = true
+
+[connector_customer]
+connector_list = "gocardless,stax,stripe"
+payout_connector_list = "wise"
+
+[delayed_session_response]
+connectors_with_delayed_session_response = "trustpay,payme"
+
+[mandates.supported_payment_methods]
+bank_debit.ach.connector_list = "gocardless"
+bank_debit.becs.connector_list = "gocardless"
+bank_debit.sepa.connector_list = "gocardless"
+card.credit.connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"
+card.debit.connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"
+pay_later.klarna.connector_list = "adyen"
+wallet.apple_pay.connector_list = "stripe,adyen,cybersource,noon"
+wallet.google_pay.connector_list = "stripe,adyen,cybersource"
+wallet.paypal.connector_list = "adyen"
+bank_redirect.ideal = {connector_list = "stripe,adyen,globalpay"}
+bank_redirect.sofort = {connector_list = "stripe,adyen,globalpay"}
+
+[multiple_api_version_supported_connectors]
+supported_connectors = "braintree"
+
+[payouts]
+payout_eligibility = true
+
+[pm_filters.default]
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "US,CA,GB,AU,NZ,FR,ES", currency = "GBP" }
+apple_pay = { country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US,KR,VN,MA,ZA,VA,CL,SV,GT,HN,PA", currency = "AED,AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+eps = { country = "AT", currency = "EUR" }
+giropay = { country = "DE", currency = "EUR" }
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+ideal = { country = "NL", currency = "EUR" }
+klarna = { country = "AT,BE,DK,FI,FR,DE,IE,IT,NL,NO,ES,SE,GB,US,CA", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
+paypal.currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD"
+sofort = { country = "ES,GB,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
+
+[pm_filters.adyen]
+ach = { country = "US", currency = "USD" }
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "AU,NZ,ES,GB,FR,IT,CA,US", currency = "GBP" }
+alfamart = { country = "ID", currency = "IDR" }
+ali_pay = { country = "AU,JP,HK,SG,MY,TH,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD" }
+ali_pay_hk = { country = "HK", currency = "HKD" }
+alma = { country = "FR", currency = "EUR" }
+apple_pay = { country = "AU,NZ,CN,JP,HK,SG,MY,BH,AE,KW,BR,ES,GB,SE,NO,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,LI,UA,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+atome = { country = "MY,SG", currency = "MYR,SGD" }
+bacs = { country = "GB", currency = "GBP" }
+bancontact_card = { country = "BE", currency = "EUR" }
+bca_bank_transfer = { country = "ID", currency = "IDR" }
+bizum = { country = "ES", currency = "EUR" }
+blik = { country = "PL", currency = "PLN" }
+bni_va = { country = "ID", currency = "IDR" }
+boleto = { country = "BR", currency = "BRL" }
+bri_va = { country = "ID", currency = "IDR" }
+cimb_va = { country = "ID", currency = "IDR" }
+dana = { country = "ID", currency = "IDR" }
+danamon_va = { country = "ID", currency = "IDR" }
+eps = { country = "AT", currency = "EUR" }
+family_mart = { country = "JP", currency = "JPY" }
+gcash = { country = "PH", currency = "PHP" }
+giropay = { country = "DE", currency = "EUR" }
+go_pay = { country = "ID", currency = "IDR" }
+google_pay = { country = "AU,NZ,JP,HK,SG,MY,TH,VN,BH,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,RO,HR,LI,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,TR,IS,CA,US", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VES,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
+ideal = { country = "NL", currency = "EUR" }
+indomaret = { country = "ID", currency = "IDR" }
+kakao_pay = { country = "KR", currency = "KRW" }
+klarna = { country = "AT,ES,GB,SE,NO,AT,NL,DE,CH,BE,FR,DK,FI,PT,IE,IT,PL,CA,US", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
+lawson = { country = "JP", currency = "JPY" }
+mandiri_va = { country = "ID", currency = "IDR" }
+mb_way = { country = "PT", currency = "EUR" }
+mini_stop = { country = "JP", currency = "JPY" }
+mobile_pay = { country = "DK,FI", currency = "DKK,SEK,NOK,EUR" }
+momo = { country = "VN", currency = "VND" }
+momo_atm = { country = "VN", currency = "VND" }
+online_banking_czech_republic = { country = "CZ", currency = "EUR,CZK" }
+online_banking_finland = { country = "FI", currency = "EUR" }
+online_banking_fpx = { country = "MY", currency = "MYR" }
+online_banking_poland = { country = "PL", currency = "PLN" }
+online_banking_slovakia = { country = "SK", currency = "EUR,CZK" }
+online_banking_thailand = { country = "TH", currency = "THB" }
+open_banking_uk = { country = "GB", currency = "GBP" }
+oxxo = { country = "MX", currency = "MXN" }
+pay_bright = { country = "CA", currency = "CAD" }
+pay_easy = { country = "JP", currency = "JPY" }
+pay_safe_card = { country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,AE,GB,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU" }
+paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
+permata_bank_transfer = { country = "ID", currency = "IDR" }
+seicomart = { country = "JP", currency = "JPY" }
+sepa = { country = "ES,SK,AT,NL,DE,BE,FR,FI,PT,IE,EE,LT,LV,IT", currency = "EUR" }
+seven_eleven = { country = "JP", currency = "JPY" }
+sofort = { country = "ES,GB,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
+swish = { country = "SE", currency = "SEK" }
+touch_n_go = { country = "MY", currency = "MYR" }
+trustly = { country = "ES,GB,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK" }
+twint = { country = "CH", currency = "CHF" }
+vipps = { country = "NO", currency = "NOK" }
+walley = { country = "SE,NO,DK,FI", currency = "DKK,EUR,NOK,SEK" }
+we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD,CNY" }
+
+[pm_filters.authorizedotnet]
+google_pay.currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD"
+paypal.currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD"
+
+[pm_filters.braintree]
+paypal.currency = "AUD,BRL,CAD,CNY,CZK,DKK,EUR,HKD,HUF,ILS,JPY,MYR,MXN,TWD,NZD,NOK,PHP,PLN,GBP,RUB,SGD,SEK,CHF,THB,USD"
+
+[pm_filters.forte]
+credit.currency = "USD"
+debit.currency = "USD"
+
+[pm_filters.helcim]
+credit.currency = "USD"
+debit.currency = "USD"
+
+[pm_filters.globepay]
+ali_pay.currency = "GBP,CNY"
+we_chat_pay.currency = "GBP,CNY"
+
+[pm_filters.klarna]
+klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD" }
+
+[pm_filters.prophetpay]
+card_redirect.currency = "USD"
+
+[pm_filters.stax]
+ach = { country = "US", currency = "USD" }
+
+[pm_filters.stripe]
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "US,CA,GB,AU,NZ,FR,ES", currency = "USD,CAD,GBP,AUD,NZD" }
+apple_pay.country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US,KR,VN,MA,ZA,VA,CL,SV,GT,HN,PA"
+cashapp = { country = "US", currency = "USD" }
+eps = { country = "AT", currency = "EUR" }
+giropay = { country = "DE", currency = "EUR" }
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+ideal = { country = "NL", currency = "EUR" }
+klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "AUD,CAD,CHF,CZK,DKK,EUR,GBP,NOK,NZD,PLN,SEK,USD" }
+sofort = { country = "AT,BE,DE,IT,NL,ES", currency = "EUR" }
+
+[pm_filters.worldpay]
+apple_pay.country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US"
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+
+[pm_filters.zen]
+boleto = { country = "BR", currency = "BRL" }
+efecty = { country = "CO", currency = "COP" }
+multibanco = { country = "PT", currency = "EUR" }
+pago_efectivo = { country = "PE", currency = "PEN" }
+pix = { country = "BR", currency = "BRL" }
+pse = { country = "CO", currency = "COP" }
+red_compra = { country = "CL", currency = "CLP" }
+red_pagos = { country = "UY", currency = "UYU" }
+
+[temp_locker_enable_config]
+bluesnap.payment_method = "card"
+nuvei.payment_method = "card"
+shift4.payment_method = "card"
+stripe.payment_method = "bank_transfer"
+bankofamerica = { payment_method = "card" }
+cybersource = { payment_method = "card" }
+nmi.payment_method = "card"
+
+[tokenization]
+braintree = { long_lived_token = false, payment_method = "card" }
+checkout = { long_lived_token = false, payment_method = "wallet", apple_pay_pre_decrypt_flow = "network_tokenization" }
+gocardless = { long_lived_token = true, payment_method = "bank_debit" }
+mollie = { long_lived_token = false, payment_method = "card" }
+payme = { long_lived_token = false, payment_method = "card" }
+square = { long_lived_token = false, payment_method = "card" }
+stax = { long_lived_token = true, payment_method = "card,bank_debit" }
+stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { list = "google_pay", type = "disable_only" } }
+
+[webhooks]
+outgoing_enabled = true
+
+[webhook_source_verification_call]
+connectors_with_webhook_source_verification_call = "paypal"
diff --git a/config/deployments/production.toml b/config/deployments/production.toml
new file mode 100644
index 000000000000..964281c52bba
--- /dev/null
+++ b/config/deployments/production.toml
@@ -0,0 +1,297 @@
+[bank_config]
+eps.adyen.banks = "bank_austria,bawag_psk_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_tirol_bank_ag,posojilnica_bank_e_gen,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag"
+eps.stripe.banks = "arzte_und_apotheker_bank,austrian_anadi_bank_ag,bank_austria,bankhaus_carl_spangler,bankhaus_schelhammer_und_schattera_ag,bawag_psk_ag,bks_bank_ag,brull_kallmus_bank_ag,btv_vier_lander_bank,capital_bank_grawe_gruppe_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_alpeadriabank_international_ag,hypo_noe_lb_fur_niederosterreich_u_wien,hypo_oberosterreich_salzburg_steiermark,hypo_tirol_bank_ag,hypo_vorarlberg_bank_ag,hypo_bank_burgenland_aktiengesellschaft,marchfelder_bank,oberbank_ag,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag,vr_bank_braunau"
+ideal.adyen.banks = "abn_amro,asn_bank,bunq,handelsbanken,ing,knab,moneyou,rabobank,regiobank,revolut,sns_bank,triodos_bank,van_lanschot"
+ideal.stripe.banks = "abn_amro,asn_bank,bunq,handelsbanken,ing,knab,moneyou,rabobank,regiobank,revolut,sns_bank,triodos_bank,van_lanschot"
+online_banking_czech_republic.adyen.banks = "ceska_sporitelna,komercni_banka,platnosc_online_karta_platnicza"
+online_banking_fpx.adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,maybank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
+online_banking_poland.adyen.banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24"
+online_banking_slovakia.adyen.banks = "e_platby_vub,postova_banka,sporo_pay,tatra_pay,viamo"
+online_banking_thailand.adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_bank,kasikorn_bank"
+open_banking_uk.adyen.banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled"
+przelewy24.stripe.banks = "alior_bank,bank_millennium,bank_nowy_bfg_sa,bank_pekao_sa,banki_spbdzielcze,blik,bnp_paribas,boz,citi,credit_agricole,e_transfer_pocztowy24,getin_bank,idea_bank,inteligo,mbank_mtransfer,nest_przelew,noble_pay,pbac_z_ipko,plus_bank,santander_przelew24,toyota_bank,volkswagen_bank"
+
+[connector_customer]
+connector_list = "stax,stripe,gocardless"
+payout_connector_list = "wise"
+
+[connectors]
+aci.base_url = "https://eu-test.oppwa.com/"
+adyen.base_url = "https://checkout-test.adyen.com/"
+adyen.secondary_base_url = "https://pal-test.adyen.com/"
+airwallex.base_url = "https://api-demo.airwallex.com/"
+applepay.base_url = "https://apple-pay-gateway.apple.com/"
+authorizedotnet.base_url = "https://apitest.authorize.net/xml/v1/request.api"
+bambora.base_url = "https://api.na.bambora.com"
+bankofamerica.base_url = "https://api.merchant-services.bankofamerica.com/"
+bitpay.base_url = "https://bitpay.com"
+bluesnap.base_url = "https://ws.bluesnap.com/"
+bluesnap.secondary_base_url = "https://pay.bluesnap.com/"
+boku.base_url = "https://country-api4-stage.boku.com"
+braintree.base_url = "https://api.sandbox.braintreegateway.com/"
+braintree.secondary_base_url = "https://payments.braintree-api.com/graphql"
+cashtocode.base_url = "https://cluster14.api.cashtocode.com"
+checkout.base_url = "https://api.checkout.com/"
+coinbase.base_url = "https://api.commerce.coinbase.com"
+cryptopay.base_url = "https://business.cryptopay.me/"
+cybersource.base_url = "https://api.cybersource.com/"
+dlocal.base_url = "https://sandbox.dlocal.com/"
+dummyconnector.base_url = "http://localhost:8080/dummy-connector"
+fiserv.base_url = "https://cert.api.fiservapps.com/"
+forte.base_url = "https://sandbox.forte.net/api/v3"
+globalpay.base_url = "https://apis.sandbox.globalpay.com/ucp/"
+globepay.base_url = "https://pay.globepay.co/"
+gocardless.base_url = "https://api.gocardless.com"
+helcim.base_url = "https://api.helcim.com/"
+iatapay.base_url = "https://iata-pay.iata.org/api/v1"
+klarna.base_url = "https://api-na.playground.klarna.com/"
+mollie.base_url = "https://api.mollie.com/v2/"
+mollie.secondary_base_url = "https://api.cc.mollie.com/v1/"
+multisafepay.base_url = "https://testapi.multisafepay.com/"
+nexinets.base_url = "https://api.payengine.de/v1"
+nmi.base_url = "https://secure.nmi.com/"
+noon.base_url = "https://api.noonpayments.com/"
+noon.key_mode = "Live"
+nuvei.base_url = "https://ppp-test.nuvei.com/"
+opayo.base_url = "https://pi-live.sagepay.com/"
+opennode.base_url = "https://api.opennode.com"
+payeezy.base_url = "https://api.payeezy.com/"
+payme.base_url = "https://live.payme.io/"
+paypal.base_url = "https://api-m.paypal.com/"
+payu.base_url = "https://secure.payu.com/api/"
+placetopay.base_url = "https://checkout.placetopay.com/rest/gateway"
+powertranz.base_url = "https://staging.ptranz.com/api/"
+prophetpay.base_url = "https://ccm-thirdparty.cps.golf/"
+rapyd.base_url = "https://sandboxapi.rapyd.net"
+riskified.base_url = "https://wh.riskified.com/api/"
+shift4.base_url = "https://api.shift4.com/"
+signifyd.base_url = "https://api.signifyd.com/"
+square.base_url = "https://connect.squareupsandbox.com/"
+square.secondary_base_url = "https://pci-connect.squareupsandbox.com/"
+stax.base_url = "https://apiprod.fattlabs.com/"
+stripe.base_url = "https://api.stripe.com/"
+stripe.base_url_file_upload = "https://files.stripe.com/"
+trustpay.base_url = "https://tpgw.trustpay.eu/"
+trustpay.base_url_bank_redirects = "https://aapi.trustpay.eu/"
+tsys.base_url = "https://gateway.transit-pass.com/"
+volt.base_url = "https://api.volt.io/"
+wise.base_url = "https://api.sandbox.transferwise.tech/"
+worldline.base_url = "https://eu.sandbox.api-ingenico.com/"
+worldpay.base_url = "https://try.access.worldpay.com/"
+zen.base_url = "https://api.zen.com/"
+zen.secondary_base_url = "https://secure.zen.com/"
+
+[delayed_session_response]
+connectors_with_delayed_session_response = "trustpay,payme"
+
+[dummy_connector]
+assets_base_url = "https://app.hyperswitch.io/assets/TestProcessor/"
+authorize_ttl = 36000
+default_return_url = "https://app.hyperswitch.io/"
+discord_invite_url = "https://discord.gg/wJZ7DVW8mm"
+enabled = false
+payment_complete_duration = 500
+payment_complete_tolerance = 100
+payment_duration = 1000
+payment_retrieve_duration = 500
+payment_retrieve_tolerance = 100
+payment_tolerance = 100
+payment_ttl = 172800
+refund_duration = 1000
+refund_retrieve_duration = 500
+refund_retrieve_tolerance = 100
+refund_tolerance = 100
+refund_ttl = 172800
+slack_invite_url = "https://join.slack.com/t/hyperswitch-io/shared_invite/zt-1k6cz4lee-SAJzhz6bjmpp4jZCDOtOIg"
+
+[frm]
+enabled = false
+
+[mandates.supported_payment_methods]
+bank_debit.ach.connector_list = "gocardless"
+bank_debit.becs.connector_list = "gocardless"
+bank_debit.sepa.connector_list = "gocardless"
+card.credit.connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"
+card.debit.connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"
+pay_later.klarna.connector_list = "adyen"
+wallet.apple_pay.connector_list = "stripe,adyen,cybersource,noon"
+wallet.google_pay.connector_list = "stripe,adyen,cybersource"
+wallet.paypal.connector_list = "adyen"
+bank_redirect.ideal = {connector_list = "stripe,adyen,globalpay"}
+bank_redirect.sofort = {connector_list = "stripe,adyen,globalpay"}
+
+[multiple_api_version_supported_connectors]
+supported_connectors = "braintree"
+
+[payouts]
+payout_eligibility = true
+
+[pm_filters.default]
+ach = { country = "US", currency = "USD" }
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "AU,NZ,ES,GB,FR,IT,CA,US", currency = "GBP" }
+ali_pay = { country = "AU,JP,HK,SG,MY,TH,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD,CNY" }
+apple_pay = { country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US,KR,VN,MA,ZA,VA,CL,SV,GT,HN,PA", currency = "AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+bacs = { country = "GB", currency = "GBP" }
+bancontact_card = { country = "BE", currency = "EUR" }
+blik = { country = "PL", currency = "PLN" }
+eps = { country = "AT", currency = "EUR" }
+giropay = { country = "DE", currency = "EUR" }
+google_pay = { country = "AU,NZ,JP,HK,SG,MY,TH,VN,BH,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,RO,HR,LI,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,TR,IS,CA,US", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VES,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
+ideal = { country = "NL", currency = "EUR" }
+klarna = { country = "AT,ES,GB,SE,NO,AT,NL,DE,CH,BE,FR,DK,FI,PT,IE,IT,PL,CA,US", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
+mb_way = { country = "PT", currency = "EUR" }
+mobile_pay = { country = "DK,FI", currency = "DKK,SEK,NOK,EUR" }
+online_banking_czech_republic = { country = "CZ", currency = "EUR,CZK" }
+online_banking_finland = { country = "FI", currency = "EUR" }
+online_banking_poland = { country = "PL", currency = "PLN" }
+online_banking_slovakia = { country = "SK", currency = "EUR,CZK" }
+pay_bright = { country = "CA", currency = "CAD" }
+paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
+sepa = { country = "ES,SK,AT,NL,DE,BE,FR,FI,PT,IE,EE,LT,LV,IT", currency = "EUR" }
+sofort = { country = "ES,GB,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
+trustly = { country = "ES,GB,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK" }
+walley = { country = "SE,NO,DK,FI", currency = "DKK,EUR,NOK,SEK" }
+we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD" }
+
+[pm_filters.adyen]
+ach = { country = "US", currency = "USD" }
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "AU,CA,ES,FR,IT,NZ,GB,US", currency = "USD,AUD,CAD,NZD,GBP" }
+alfamart = { country = "ID", currency = "IDR" }
+ali_pay = { country = "AU,JP,HK,SG,MY,TH,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD" }
+ali_pay_hk = { country = "HK", currency = "HKD" }
+alma = { country = "FR", currency = "EUR" }
+apple_pay = { country = "AE,AM,AR,AT,AU,AZ,BE,BG,BH,BR,BY,CA,CH,CN,CO,CR,CY,CZ,DE,DK,EE,ES,FI,FO,FR,GB,GE,GG,GL,GR,HK,HR,HU,IE,IL,IM,IS,IT,JE,JO,JP,KW,KZ,LI,LT,LU,LV,MC,MD,ME,MO,MT,MX,MY,NL,NO,NZ,PE,PL,PS,PT,QA,RO,RS,SA,SE,SG,SI,SK,SM,TW,UA,GB,UM,US", currency = "AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+atome = { country = "MY,SG", currency = "MYR,SGD" }
+bacs = { country = "GB", currency = "GBP" }
+bancontact_card = { country = "BE", currency = "EUR" }
+bca_bank_transfer = { country = "ID", currency = "IDR" }
+bizum = { country = "ES", currency = "EUR" }
+blik = { country = "PL", currency = "PLN" }
+bni_va = { country = "ID", currency = "IDR" }
+boleto = { country = "BR", currency = "BRL" }
+bri_va = { country = "ID", currency = "IDR" }
+cimb_va = { country = "ID", currency = "IDR" }
+dana = { country = "ID", currency = "IDR" }
+danamon_va = { country = "ID", currency = "IDR" }
+eps = { country = "AT", currency = "EUR" }
+family_mart = { country = "JP", currency = "JPY" }
+gcash = { country = "PH", currency = "PHP" }
+giropay = { country = "DE", currency = "EUR" }
+go_pay = { country = "ID", currency = "IDR" }
+google_pay = { country = "AE,AG,AL,AO,AR,AS,AT,AU,AZ,BE,BG,BH,BR,BY,CA,CH,CL,CO,CY,CZ,DE,DK,DO,DZ,EE,EG,ES,FI,FR,GB,GR,HK,HR,HU,ID,IE,IL,IN,IS,IT,JO,JP,KE,KW,KZ,LB,LI,LK,LT,LU,LV,MT,MX,MY,NL,NO,NZ,OM,PA,PE,PH,PK,PL,PT,QA,RO,RU,SA,SE,SG,SI,SK,TH,TR,TW,UA,GB,US,UY,VN,ZA", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VES,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
+ideal = { country = "NL", currency = "EUR" }
+indomaret = { country = "ID", currency = "IDR" }
+kakao_pay = { country = "KR", currency = "KRW" }
+klarna = { country = "AT,BE,CA,CH,DE,DK,ES,FI,FR,GB,IE,IT,NL,NO,PL,PT,SE,GB,US", currency = "AUD,CAD,CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD" }
+lawson = { country = "JP", currency = "JPY" }
+mandiri_va = { country = "ID", currency = "IDR" }
+mb_way = { country = "PT", currency = "EUR" }
+mini_stop = { country = "JP", currency = "JPY" }
+mobile_pay = { country = "DK,FI", currency = "DKK,SEK,NOK,EUR" }
+momo = { country = "VN", currency = "VND" }
+momo_atm = { country = "VN", currency = "VND" }
+online_banking_czech_republic = { country = "CZ", currency = "EUR,CZK" }
+online_banking_finland = { country = "FI", currency = "EUR" }
+online_banking_fpx = { country = "MY", currency = "MYR" }
+online_banking_poland = { country = "PL", currency = "PLN" }
+online_banking_slovakia = { country = "SK", currency = "EUR,CZK" }
+online_banking_thailand = { country = "TH", currency = "THB" }
+open_banking_uk = { country = "GB", currency = "GBP" }
+oxxo = { country = "MX", currency = "MXN" }
+pay_bright = { country = "CA", currency = "CAD" }
+pay_easy = { country = "JP", currency = "JPY" }
+pay_safe_card = { country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,AE,GB,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU" }
+paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
+permata_bank_transfer = { country = "ID", currency = "IDR" }
+seicomart = { country = "JP", currency = "JPY" }
+sepa = { country = "ES,SK,AT,NL,DE,BE,FR,FI,PT,IE,EE,LT,LV,IT", currency = "EUR" }
+seven_eleven = { country = "JP", currency = "JPY" }
+sofort = { country = "AT,BE,CH,DE,ES,FI,FR,GB,IT,NL,PL,SE,GB", currency = "EUR" }
+swish = { country = "SE", currency = "SEK" }
+touch_n_go = { country = "MY", currency = "MYR" }
+trustly = { country = "ES,GB,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK" }
+twint = { country = "CH", currency = "CHF" }
+vipps = { country = "NO", currency = "NOK" }
+walley = { country = "SE,NO,DK,FI", currency = "DKK,EUR,NOK,SEK" }
+we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD" }
+
+[pm_filters.authorizedotnet]
+google_pay.currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD"
+paypal.currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD"
+
+[pm_filters.braintree]
+paypal.currency = "AUD,BRL,CAD,CNY,CZK,DKK,EUR,HKD,HUF,ILS,JPY,MYR,MXN,TWD,NZD,NOK,PHP,PLN,GBP,RUB,SGD,SEK,CHF,THB,USD"
+
+[pm_filters.forte]
+credit.currency = "USD"
+debit.currency = "USD"
+
+[pm_filters.helcim]
+credit.currency = "USD"
+debit.currency = "USD"
+
+[pm_filters.globepay]
+ali_pay.currency = "GBP,CNY"
+we_chat_pay.currency = "GBP,CNY"
+
+[pm_filters.klarna]
+klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD" }
+
+[pm_filters.prophetpay]
+card_redirect.currency = "USD"
+
+[pm_filters.stax]
+ach = { country = "US", currency = "USD" }
+
+[pm_filters.stripe]
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "US,CA,GB,AU,NZ,FR,ES", currency = "USD,CAD,GBP,AUD,NZD" }
+apple_pay.country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US,KR,VN,MA,ZA,VA,CL,SV,GT,HN,PA"
+cashapp = { country = "US", currency = "USD" }
+eps = { country = "AT", currency = "EUR" }
+giropay = { country = "DE", currency = "EUR" }
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+ideal = { country = "NL", currency = "EUR" }
+klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "AUD,CAD,CHF,CZK,DKK,EUR,GBP,NOK,NZD,PLN,SEK,USD" }
+sofort = { country = "AT,BE,DE,IT,NL,ES", currency = "EUR" }
+
+[pm_filters.worldpay]
+apple_pay.country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US"
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+
+[pm_filters.zen]
+boleto = { country = "BR", currency = "BRL" }
+efecty = { country = "CO", currency = "COP" }
+multibanco = { country = "PT", currency = "EUR" }
+pago_efectivo = { country = "PE", currency = "PEN" }
+pix = { country = "BR", currency = "BRL" }
+pse = { country = "CO", currency = "COP" }
+red_compra = { country = "CL", currency = "CLP" }
+red_pagos = { country = "UY", currency = "UYU" }
+
+[temp_locker_enable_config]
+bluesnap.payment_method = "card"
+nuvei.payment_method = "card"
+shift4.payment_method = "card"
+stripe.payment_method = "bank_transfer"
+bankofamerica = { payment_method = "card" }
+cybersource = { payment_method = "card" }
+nmi.payment_method = "card"
+
+[tokenization]
+braintree = { long_lived_token = false, payment_method = "card" }
+checkout = { long_lived_token = false, payment_method = "wallet", apple_pay_pre_decrypt_flow = "network_tokenization" }
+gocardless = { long_lived_token = true, payment_method = "bank_debit" }
+mollie = { long_lived_token = false, payment_method = "card" }
+payme = { long_lived_token = false, payment_method = "card" }
+square = { long_lived_token = false, payment_method = "card" }
+stax = { long_lived_token = true, payment_method = "card,bank_debit" }
+stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { list = "google_pay", type = "disable_only" } }
+
+[webhooks]
+outgoing_enabled = true
+
+[webhook_source_verification_call]
+connectors_with_webhook_source_verification_call = "paypal"
diff --git a/config/deployments/sandbox.toml b/config/deployments/sandbox.toml
new file mode 100644
index 000000000000..aa2377cf8a08
--- /dev/null
+++ b/config/deployments/sandbox.toml
@@ -0,0 +1,299 @@
+[bank_config]
+eps.adyen.banks = "bank_austria,bawag_psk_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_tirol_bank_ag,posojilnica_bank_e_gen,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag"
+eps.stripe.banks = "arzte_und_apotheker_bank,austrian_anadi_bank_ag,bank_austria,bankhaus_carl_spangler,bankhaus_schelhammer_und_schattera_ag,bawag_psk_ag,bks_bank_ag,brull_kallmus_bank_ag,btv_vier_lander_bank,capital_bank_grawe_gruppe_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_alpeadriabank_international_ag,hypo_noe_lb_fur_niederosterreich_u_wien,hypo_oberosterreich_salzburg_steiermark,hypo_tirol_bank_ag,hypo_vorarlberg_bank_ag,hypo_bank_burgenland_aktiengesellschaft,marchfelder_bank,oberbank_ag,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag,vr_bank_braunau"
+ideal.adyen.banks = "abn_amro,asn_bank,bunq,handelsbanken,ing,knab,moneyou,rabobank,regiobank,revolut,sns_bank,triodos_bank,van_lanschot"
+ideal.stripe.banks = "abn_amro,asn_bank,bunq,handelsbanken,ing,knab,moneyou,rabobank,regiobank,revolut,sns_bank,triodos_bank,van_lanschot"
+online_banking_czech_republic.adyen.banks = "ceska_sporitelna,komercni_banka,platnosc_online_karta_platnicza"
+online_banking_fpx.adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,maybank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
+online_banking_poland.adyen.banks = "blik_psp,place_zipko,m_bank,pay_with_ing,santander_przelew24,bank_pekaosa,bank_millennium,pay_with_alior_bank,banki_spoldzielcze,pay_with_inteligo,bnp_paribas_poland,bank_nowy_sa,credit_agricole,pay_with_bos,pay_with_citi_handlowy,pay_with_plus_bank,toyota_bank,velo_bank,e_transfer_pocztowy24"
+online_banking_slovakia.adyen.banks = "e_platby_vub,postova_banka,sporo_pay,tatra_pay,viamo"
+online_banking_thailand.adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_bank,kasikorn_bank"
+open_banking_uk.adyen.banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled"
+przelewy24.stripe.banks = "alior_bank,bank_millennium,bank_nowy_bfg_sa,bank_pekao_sa,banki_spbdzielcze,blik,bnp_paribas,boz,citi,credit_agricole,e_transfer_pocztowy24,getin_bank,idea_bank,inteligo,mbank_mtransfer,nest_przelew,noble_pay,pbac_z_ipko,plus_bank,santander_przelew24,toyota_bank,volkswagen_bank"
+
+[connector_customer]
+connector_list = "stax,stripe,gocardless"
+payout_connector_list = "wise"
+
+[connectors]
+aci.base_url = "https://eu-test.oppwa.com/"
+adyen.base_url = "https://checkout-test.adyen.com/"
+adyen.secondary_base_url = "https://pal-test.adyen.com/"
+airwallex.base_url = "https://api-demo.airwallex.com/"
+applepay.base_url = "https://apple-pay-gateway.apple.com/"
+authorizedotnet.base_url = "https://apitest.authorize.net/xml/v1/request.api"
+bambora.base_url = "https://api.na.bambora.com"
+bankofamerica.base_url = "https://apitest.merchant-services.bankofamerica.com/"
+bitpay.base_url = "https://test.bitpay.com"
+bluesnap.base_url = "https://sandbox.bluesnap.com/"
+bluesnap.secondary_base_url = "https://sandpay.bluesnap.com/"
+boku.base_url = "https://country-api4-stage.boku.com"
+braintree.base_url = "https://api.sandbox.braintreegateway.com/"
+braintree.secondary_base_url = "https://payments.sandbox.braintree-api.com/graphql"
+cashtocode.base_url = "https://cluster05.api-test.cashtocode.com"
+checkout.base_url = "https://api.sandbox.checkout.com/"
+coinbase.base_url = "https://api.commerce.coinbase.com"
+cryptopay.base_url = "https://business-sandbox.cryptopay.me"
+cybersource.base_url = "https://apitest.cybersource.com/"
+dlocal.base_url = "https://sandbox.dlocal.com/"
+dummyconnector.base_url = "http://localhost:8080/dummy-connector"
+fiserv.base_url = "https://cert.api.fiservapps.com/"
+forte.base_url = "https://sandbox.forte.net/api/v3"
+globalpay.base_url = "https://apis.sandbox.globalpay.com/ucp/"
+globepay.base_url = "https://pay.globepay.co/"
+gocardless.base_url = "https://api-sandbox.gocardless.com"
+helcim.base_url = "https://api.helcim.com/"
+iatapay.base_url = "https://iata-pay.iata.org/api/v1"
+klarna.base_url = "https://api-na.playground.klarna.com/"
+mollie.base_url = "https://api.mollie.com/v2/"
+mollie.secondary_base_url = "https://api.cc.mollie.com/v1/"
+multisafepay.base_url = "https://testapi.multisafepay.com/"
+nexinets.base_url = "https://apitest.payengine.de/v1"
+nmi.base_url = "https://secure.nmi.com/"
+noon.base_url = "https://api-test.noonpayments.com/"
+noon.key_mode = "Test"
+nuvei.base_url = "https://ppp-test.nuvei.com/"
+opayo.base_url = "https://pi-test.sagepay.com/"
+opennode.base_url = "https://dev-api.opennode.com"
+payeezy.base_url = "https://api-cert.payeezy.com/"
+payme.base_url = "https://sandbox.payme.io/"
+paypal.base_url = "https://api-m.sandbox.paypal.com/"
+payu.base_url = "https://secure.snd.payu.com/"
+placetopay.base_url = "https://test.placetopay.com/rest/gateway"
+powertranz.base_url = "https://staging.ptranz.com/api/"
+prophetpay.base_url = "https://ccm-thirdparty.cps.golf/"
+rapyd.base_url = "https://sandboxapi.rapyd.net"
+riskified.base_url = "https://sandbox.riskified.com/api"
+shift4.base_url = "https://api.shift4.com/"
+signifyd.base_url = "https://api.signifyd.com/"
+square.base_url = "https://connect.squareupsandbox.com/"
+square.secondary_base_url = "https://pci-connect.squareupsandbox.com/"
+stax.base_url = "https://apiprod.fattlabs.com/"
+stripe.base_url = "https://api.stripe.com/"
+stripe.base_url_file_upload = "https://files.stripe.com/"
+trustpay.base_url = "https://test-tpgw.trustpay.eu/"
+trustpay.base_url_bank_redirects = "https://aapi.trustpay.eu/"
+tsys.base_url = "https://stagegw.transnox.com/"
+volt.base_url = "https://api.sandbox.volt.io/"
+wise.base_url = "https://api.sandbox.transferwise.tech/"
+worldline.base_url = "https://eu.sandbox.api-ingenico.com/"
+worldpay.base_url = "https://try.access.worldpay.com/"
+zen.base_url = "https://api.zen-test.com/"
+zen.secondary_base_url = "https://secure.zen-test.com/"
+
+[delayed_session_response]
+connectors_with_delayed_session_response = "trustpay,payme"
+
+[dummy_connector]
+enabled = true
+assets_base_url = "https://app.hyperswitch.io/assets/TestProcessor/"
+authorize_ttl = 36000
+default_return_url = "https://app.hyperswitch.io/"
+discord_invite_url = "https://discord.gg/wJZ7DVW8mm"
+payment_complete_duration = 500
+payment_complete_tolerance = 100
+payment_duration = 1000
+payment_retrieve_duration = 500
+payment_retrieve_tolerance = 100
+payment_tolerance = 100
+payment_ttl = 172800
+refund_duration = 1000
+refund_retrieve_duration = 500
+refund_retrieve_tolerance = 100
+refund_tolerance = 100
+refund_ttl = 172800
+slack_invite_url = "https://join.slack.com/t/hyperswitch-io/shared_invite/zt-1k6cz4lee-SAJzhz6bjmpp4jZCDOtOIg"
+
+[frm]
+enabled = true
+
+[mandates.supported_payment_methods]
+bank_debit.ach.connector_list = "gocardless"
+bank_debit.becs.connector_list = "gocardless"
+bank_debit.sepa.connector_list = "gocardless"
+card.credit.connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"
+card.debit.connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"
+pay_later.klarna.connector_list = "adyen"
+wallet.apple_pay.connector_list = "stripe,adyen,cybersource,noon"
+wallet.google_pay.connector_list = "stripe,adyen,cybersource"
+wallet.paypal.connector_list = "adyen"
+bank_redirect.ideal = {connector_list = "stripe,adyen,globalpay"}
+bank_redirect.sofort = {connector_list = "stripe,adyen,globalpay"}
+
+[multiple_api_version_supported_connectors]
+supported_connectors = "braintree"
+
+[payouts]
+payout_eligibility = true
+
+[pm_filters.default]
+ach = { country = "US", currency = "USD" }
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "AU,NZ,ES,GB,FR,IT,CA,US", currency = "GBP" }
+ali_pay = { country = "AU,JP,HK,SG,MY,TH,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD" }
+apple_pay = { country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US,KR,VN,MA,ZA,VA,CL,SV,GT,HN,PA", currency = "AED,AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+bacs = { country = "GB", currency = "GBP" }
+bancontact_card = { country = "BE", currency = "EUR" }
+blik = { country = "PL", currency = "PLN" }
+eps = { country = "AT", currency = "EUR" }
+giropay = { country = "DE", currency = "EUR" }
+google_pay = { country = "AU,NZ,JP,HK,SG,MY,TH,VN,BH,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,RO,HR,LI,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,TR,IS,CA,US", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VES,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
+ideal = { country = "NL", currency = "EUR" }
+klarna = { country = "AT,ES,GB,SE,NO,AT,NL,DE,CH,BE,FR,DK,FI,PT,IE,IT,PL,CA,US", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
+mb_way = { country = "PT", currency = "EUR" }
+mobile_pay = { country = "DK,FI", currency = "DKK,SEK,NOK,EUR" }
+online_banking_czech_republic = { country = "CZ", currency = "EUR,CZK" }
+online_banking_finland = { country = "FI", currency = "EUR" }
+online_banking_poland = { country = "PL", currency = "PLN" }
+online_banking_slovakia = { country = "SK", currency = "EUR,CZK" }
+pay_bright = { country = "CA", currency = "CAD" }
+paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
+sepa = { country = "ES,SK,AT,NL,DE,BE,FR,FI,PT,IE,EE,LT,LV,IT", currency = "EUR" }
+sofort = { country = "ES,GB,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
+trustly = { country = "ES,GB,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK" }
+walley = { country = "SE,NO,DK,FI", currency = "DKK,EUR,NOK,SEK" }
+we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD" }
+
+[pm_filters.adyen]
+ach = { country = "US", currency = "USD" }
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "AU,NZ,ES,GB,FR,IT,CA,US", currency = "GBP" }
+alfamart = { country = "ID", currency = "IDR" }
+ali_pay = { country = "AU,JP,HK,SG,MY,TH,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD" }
+ali_pay_hk = { country = "HK", currency = "HKD" }
+alma = { country = "FR", currency = "EUR" }
+apple_pay = { country = "AU,NZ,CN,JP,HK,SG,MY,BH,AE,KW,BR,ES,GB,SE,NO,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,LI,UA,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+atome = { country = "MY,SG", currency = "MYR,SGD" }
+bacs = { country = "GB", currency = "GBP" }
+bancontact_card = { country = "BE", currency = "EUR" }
+bca_bank_transfer = { country = "ID", currency = "IDR" }
+bizum = { country = "ES", currency = "EUR" }
+blik = { country = "PL", currency = "PLN" }
+bni_va = { country = "ID", currency = "IDR" }
+boleto = { country = "BR", currency = "BRL" }
+bri_va = { country = "ID", currency = "IDR" }
+cimb_va = { country = "ID", currency = "IDR" }
+dana = { country = "ID", currency = "IDR" }
+danamon_va = { country = "ID", currency = "IDR" }
+eps = { country = "AT", currency = "EUR" }
+family_mart = { country = "JP", currency = "JPY" }
+gcash = { country = "PH", currency = "PHP" }
+giropay = { country = "DE", currency = "EUR" }
+go_pay = { country = "ID", currency = "IDR" }
+google_pay = { country = "AU,NZ,JP,HK,SG,MY,TH,VN,BH,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,RO,HR,LI,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,TR,IS,CA,US", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VES,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
+ideal = { country = "NL", currency = "EUR" }
+indomaret = { country = "ID", currency = "IDR" }
+kakao_pay = { country = "KR", currency = "KRW" }
+klarna = { country = "AT,ES,GB,SE,NO,NL,DE,CH,BE,FR,DK,FI,PT,IE,IT,PL,CA,US", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
+lawson = { country = "JP", currency = "JPY" }
+mandiri_va = { country = "ID", currency = "IDR" }
+mb_way = { country = "PT", currency = "EUR" }
+mini_stop = { country = "JP", currency = "JPY" }
+mobile_pay = { country = "DK,FI", currency = "DKK,SEK,NOK,EUR" }
+momo = { country = "VN", currency = "VND" }
+momo_atm = { country = "VN", currency = "VND" }
+online_banking_czech_republic = { country = "CZ", currency = "EUR,CZK" }
+online_banking_finland = { country = "FI", currency = "EUR" }
+online_banking_fpx = { country = "MY", currency = "MYR" }
+online_banking_poland = { country = "PL", currency = "PLN" }
+online_banking_slovakia = { country = "SK", currency = "EUR,CZK" }
+online_banking_thailand = { country = "TH", currency = "THB" }
+open_banking_uk = { country = "GB", currency = "GBP" }
+oxxo = { country = "MX", currency = "MXN" }
+pay_bright = { country = "CA", currency = "CAD" }
+pay_easy = { country = "JP", currency = "JPY" }
+pay_safe_card = { country = "AT,AU,BE,BR,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,AE,GB,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU" }
+paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
+permata_bank_transfer = { country = "ID", currency = "IDR" }
+seicomart = { country = "JP", currency = "JPY" }
+sepa = { country = "ES,SK,AT,NL,DE,BE,FR,FI,PT,IE,EE,LT,LV,IT", currency = "EUR" }
+seven_eleven = { country = "JP", currency = "JPY" }
+sofort = { country = "ES,GB,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
+swish = { country = "SE", currency = "SEK" }
+touch_n_go = { country = "MY", currency = "MYR" }
+trustly = { country = "ES,GB,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK" }
+twint = { country = "CH", currency = "CHF" }
+vipps = { country = "NO", currency = "NOK" }
+walley = { country = "SE,NO,DK,FI", currency = "DKK,EUR,NOK,SEK" }
+we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD" }
+pix = { country = "BR", currency = "BRL" }
+
+[pm_filters.authorizedotnet]
+google_pay.currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD"
+paypal.currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD"
+
+[pm_filters.braintree]
+paypal.currency = "AUD,BRL,CAD,CNY,CZK,DKK,EUR,HKD,HUF,ILS,JPY,MYR,MXN,TWD,NZD,NOK,PHP,PLN,GBP,RUB,SGD,SEK,CHF,THB,USD"
+
+[pm_filters.forte]
+credit.currency = "USD"
+debit.currency = "USD"
+
+[pm_filters.helcim]
+credit.currency = "USD"
+debit.currency = "USD"
+
+[pm_filters.globepay]
+ali_pay.currency = "GBP,CNY"
+we_chat_pay.currency = "GBP,CNY"
+
+[pm_filters.klarna]
+klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "CHF,DKK,EUR,GBP,NOK,PLN,SEK,USD,AUD,NZD,CAD" }
+
+[pm_filters.prophetpay]
+card_redirect.currency = "USD"
+
+[pm_filters.stax]
+ach = { country = "US", currency = "USD" }
+
+[pm_filters.stripe]
+affirm = { country = "US", currency = "USD" }
+afterpay_clearpay = { country = "US,CA,GB,AU,NZ,FR,ES", currency = "USD,CAD,GBP,AUD,NZD" }
+apple_pay.country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US,KR,VN,MA,ZA,VA,CL,SV,GT,HN,PA"
+cashapp = { country = "US", currency = "USD" }
+eps = { country = "AT", currency = "EUR" }
+giropay = { country = "DE", currency = "EUR" }
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+ideal = { country = "NL", currency = "EUR" }
+klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "AUD,CAD,CHF,CZK,DKK,EUR,GBP,NOK,NZD,PLN,SEK,USD" }
+sofort = { country = "AT,BE,DE,IT,NL,ES", currency = "EUR" }
+
+[pm_filters.worldpay]
+apple_pay.country = "AU,CN,HK,JP,MO,MY,NZ,SG,TW,AM,AT,AZ,BY,BE,BG,HR,CY,CZ,DK,EE,FO,FI,FR,GE,DE,GR,GL,GG,HU,IS,IE,IM,IT,KZ,JE,LV,LI,LT,LU,MT,MD,MC,ME,NL,NO,PL,PT,RO,SM,RS,SK,SI,ES,SE,CH,UA,GB,AR,CO,CR,BR,MX,PE,BH,IL,JO,KW,PS,QA,SA,AE,CA,UM,US"
+google_pay.country = "AL,DZ,AS,AO,AG,AR,AU,AT,AZ,BH,BY,BE,BR,BG,CA,CL,CO,HR,CZ,DK,DO,EG,EE,FI,FR,DE,GR,HK,HU,IN,ID,IE,IL,IT,JP,JO,KZ,KE,KW,LV,LB,LT,LU,MY,MX,NL,NZ,NO,OM,PK,PA,PE,PH,PL,PT,QA,RO,RU,SA,SG,SK,ZA,ES,LK,SE,CH,TW,TH,TR,UA,AE,GB,US,UY,VN"
+
+[pm_filters.zen]
+boleto = { country = "BR", currency = "BRL" }
+efecty = { country = "CO", currency = "COP" }
+multibanco = { country = "PT", currency = "EUR" }
+pago_efectivo = { country = "PE", currency = "PEN" }
+pix = { country = "BR", currency = "BRL" }
+pse = { country = "CO", currency = "COP" }
+red_compra = { country = "CL", currency = "CLP" }
+red_pagos = { country = "UY", currency = "UYU" }
+
+
+[temp_locker_enable_config]
+bluesnap.payment_method = "card"
+nuvei.payment_method = "card"
+shift4.payment_method = "card"
+stripe.payment_method = "bank_transfer"
+bankofamerica = { payment_method = "card" }
+cybersource = { payment_method = "card" }
+nmi.payment_method = "card"
+
+[tokenization]
+braintree = { long_lived_token = false, payment_method = "card" }
+checkout = { long_lived_token = false, payment_method = "wallet", apple_pay_pre_decrypt_flow = "network_tokenization" }
+gocardless = { long_lived_token = true, payment_method = "bank_debit" }
+mollie = { long_lived_token = false, payment_method = "card" }
+payme = { long_lived_token = false, payment_method = "card" }
+square = { long_lived_token = false, payment_method = "card" }
+stax = { long_lived_token = true, payment_method = "card,bank_debit" }
+stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { list = "google_pay", type = "disable_only" } }
+
+[webhooks]
+outgoing_enabled = true
+
+[webhook_source_verification_call]
+connectors_with_webhook_source_verification_call = "paypal"
diff --git a/config/deployments/scheduler/consumer.toml b/config/deployments/scheduler/consumer.toml
new file mode 100644
index 000000000000..cdd605526689
--- /dev/null
+++ b/config/deployments/scheduler/consumer.toml
@@ -0,0 +1,17 @@
+# Scheduler settings provides a point to modify the behaviour of scheduler flow.
+# It defines the streams/queues name and configuration as well as event selection variables
+[scheduler]
+consumer_group = "scheduler_group"
+graceful_shutdown_interval = 60000 # Specifies how much time to wait while re-attempting shutdown for a service (in milliseconds)
+loop_interval = 3000 # Specifies how much time to wait before starting the defined behaviour of producer or consumer (in milliseconds)
+stream = "scheduler_stream"
+
+[scheduler.consumer]
+consumer_group = "scheduler_group"
+disabled = false # This flag decides if the consumer should actively consume task
+
+# Scheduler server configuration
+[scheduler.server]
+port = 3000 # Port on which the server will listen for incoming requests
+host = "127.0.0.1" # Host IP address to bind the server to
+workers = 1 # Number of actix workers to handle incoming requests concurrently
diff --git a/config/deployments/scheduler/producer.toml b/config/deployments/scheduler/producer.toml
new file mode 100644
index 000000000000..9cbaee96f03c
--- /dev/null
+++ b/config/deployments/scheduler/producer.toml
@@ -0,0 +1,20 @@
+# Scheduler settings provides a point to modify the behaviour of scheduler flow.
+# It defines the streams/queues name and configuration as well as event selection variables
+[scheduler]
+consumer_group = "scheduler_group"
+graceful_shutdown_interval = 60000 # Specifies how much time to wait while re-attempting shutdown for a service (in milliseconds)
+loop_interval = 30000 # Specifies how much time to wait before starting the defined behaviour of producer or consumer (in milliseconds)
+stream = "scheduler_stream"
+
+[scheduler.producer]
+batch_size = 50 # Specifies the batch size the producer will push under a single entry in the redis queue
+lock_key = "producer_locking_key" # The following keys define the producer lock that is created in redis with
+lock_ttl = 160 # the ttl being the expiry (in seconds)
+lower_fetch_limit = 900 # Lower limit for fetching entries from redis queue (in seconds)
+upper_fetch_limit = 0 # Upper limit for fetching entries from the redis queue (in seconds)
+
+# Scheduler server configuration
+[scheduler.server]
+port = 3000 # Port on which the server will listen for incoming requests
+host = "127.0.0.1" # Host IP address to bind the server to
+workers = 1 # Number of actix workers to handle incoming requests concurrently
diff --git a/config/development.toml b/config/development.toml
index c82607a704c3..20abb7bd6f30 100644
--- a/config/development.toml
+++ b/config/development.toml
@@ -20,6 +20,7 @@ port = 5432
dbname = "hyperswitch_db"
pool_size = 5
connection_timeout = 10
+min_idle = 2
[replica_database]
username = "db_user"
@@ -30,6 +31,23 @@ dbname = "hyperswitch_db"
pool_size = 5
connection_timeout = 10
+[redis]
+host = "127.0.0.1"
+port = 6379
+pool_size = 5
+reconnect_max_attempts = 5
+reconnect_delay = 5
+default_ttl = 300
+default_hash_ttl = 900
+use_legacy_version = false
+stream_read_count = 1
+auto_pipeline = true
+disable_auto_backpressure = false
+max_in_flight_commands = 5000
+default_command_timeout = 0
+max_feed_count = 200
+
+
[server]
# HTTP Request body limit. Defaults to 32kB
request_body_limit = 32768
@@ -51,14 +69,19 @@ host = ""
host_rs = ""
mock_locker = true
basilisk_host = ""
+locker_enabled = true
+
+
+[forex_api]
+call_delay = 21600
+local_fetch_retry_count = 5
+local_fetch_retry_delay = 1000
+api_timeout = 20000
+api_key = "YOUR API KEY HERE"
+fallback_api_key = "YOUR API KEY HERE"
+redis_lock_timeout = 26000
[jwekey]
-locker_key_identifier1 = ""
-locker_key_identifier2 = ""
-locker_encryption_key1 = ""
-locker_encryption_key2 = ""
-locker_decryption_key1 = ""
-locker_decryption_key2 = ""
vault_encryption_key = ""
rust_locker_encryption_key = ""
vault_private_key = ""
@@ -103,6 +126,7 @@ cards = [
"payme",
"paypal",
"payu",
+ "placetopay",
"powertranz",
"prophetpay",
"shift4",
@@ -175,10 +199,13 @@ payeezy.base_url = "https://api-cert.payeezy.com/"
payme.base_url = "https://sandbox.payme.io/"
paypal.base_url = "https://api-m.sandbox.paypal.com/"
payu.base_url = "https://secure.snd.payu.com/"
+placetopay.base_url = "https://test.placetopay.com/rest/gateway"
powertranz.base_url = "https://staging.ptranz.com/api/"
prophetpay.base_url = "https://ccm-thirdparty.cps.golf/"
rapyd.base_url = "https://sandboxapi.rapyd.net"
+riskified.base_url = "https://sandbox.riskified.com/api"
shift4.base_url = "https://api.shift4.com/"
+signifyd.base_url = "https://api.signifyd.com/"
square.base_url = "https://connect.squareupsandbox.com/"
square.secondary_base_url = "https://pci-connect.squareupsandbox.com/"
stax.base_url = "https://apiprod.fattlabs.com/"
@@ -201,10 +228,21 @@ stream = "SCHEDULER_STREAM"
disabled = false
consumer_group = "SCHEDULER_GROUP"
+[scheduler.server]
+port = 3000
+host = "127.0.0.1"
+workers = 1
+
[email]
-from_email = "notify@example.com"
+sender_email = "example@example.com"
aws_region = ""
-base_url = ""
+base_url = "http://localhost:8080"
+allowed_unverified_days = 1
+active_email_client = "SES"
+
+[email.aws_ses]
+email_role_arn = ""
+sts_role_session_name = ""
[bank_config.eps]
stripe = { banks = "arzte_und_apotheker_bank,austrian_anadi_bank_ag,bank_austria,bankhaus_carl_spangler,bankhaus_schelhammer_und_schattera_ag,bawag_psk_ag,bks_bank_ag,brull_kallmus_bank_ag,btv_vier_lander_bank,capital_bank_grawe_gruppe_ag,dolomitenbank,easybank_ag,erste_bank_und_sparkassen,hypo_alpeadriabank_international_ag,hypo_noe_lb_fur_niederosterreich_u_wien,hypo_oberosterreich_salzburg_steiermark,hypo_tirol_bank_ag,hypo_vorarlberg_bank_ag,hypo_bank_burgenland_aktiengesellschaft,marchfelder_bank,oberbank_ag,raiffeisen_bankengruppe_osterreich,schoellerbank_ag,sparda_bank_wien,volksbank_gruppe,volkskreditbank_ag,vr_bank_braunau" }
@@ -230,7 +268,7 @@ stripe = { banks = "alior_bank,bank_millennium,bank_nowy_bfg_sa,bank_pekao_sa,ba
adyen = { banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,halifax,lloyds,monzo,nat_west,nationwide_bank,royal_bank_of_scotland,starling,tsb_bank,tesco_bank,ulster_bank,barclays,hsbc_bank,revolut,santander_przelew24,open_bank_success,open_bank_failure,open_bank_cancelled"}
[bank_config.online_banking_fpx]
-adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,may_bank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
+adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,maybank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
[bank_config.online_banking_thailand]
adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_bank,kasikorn_bank"
@@ -260,31 +298,31 @@ ideal = { country = "NL", currency = "EUR" }
cashapp = { country = "US", currency = "USD" }
[pm_filters.adyen]
-google_pay = { country = "AU,NZ,JP,HK,SG,MY,TH,VN,BH,AE,KW,BR,ES,UK,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,RO,HR,LI,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,TR,IS,CA,US", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,ISK,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VEF,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
-apple_pay = { country = "AU,NZ,CN,JP,HK,SG,MY,BH,AE,KW,BR,ES,UK,SE,NO,AK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,LI,UA,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
-paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,UK,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
+google_pay = { country = "AU,NZ,JP,HK,SG,MY,TH,VN,BH,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,RO,HR,LI,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,TR,IS,CA,US", currency = "AED,ALL,AMD,ANG,AOA,ARS,AUD,AWG,AZN,BAM,BBD,BDT,BGN,BHD,BMD,BND,BOB,BRL,BSD,BWP,BYN,BZD,CAD,CHF,CLP,CNY,COP,CRC,CUP,CVE,CZK,DJF,DKK,DOP,DZD,EGP,ETB,EUR,FJD,FKP,GBP,GEL,GHS,GIP,GMD,GNF,GTQ,GYD,HKD,HNL,HTG,HUF,IDR,ILS,INR,IQD,JMD,JOD,JPY,KES,KGS,KHR,KMF,KRW,KWD,KYD,KZT,LAK,LBP,LKR,LYD,MAD,MDL,MKD,MMK,MNT,MOP,MRU,MUR,MVR,MWK,MXN,MYR,MZN,NAD,NGN,NIO,NOK,NPR,NZD,OMR,PAB,PEN,PGK,PHP,PKR,PLN,PYG,QAR,RON,RSD,RUB,RWF,SAR,SBD,SCR,SEK,SGD,SHP,SLE,SOS,SRD,STN,SVC,SZL,THB,TND,TOP,TRY,TTD,TWD,TZS,UAH,UGX,USD,UYU,UZS,VES,VND,VUV,WST,XAF,XCD,XOF,XPF,YER,ZAR,ZMW" }
+apple_pay = { country = "AU,NZ,CN,JP,HK,SG,MY,BH,AE,KW,BR,ES,GB,SE,NO,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,LI,UA,MT,SI,GR,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,CHF,CAD,EUR,GBP,HKD,SGD,USD" }
+paypal = { country = "AU,NZ,CN,JP,HK,MY,TH,KR,PH,ID,AE,KW,BR,ES,GB,SE,NO,SK,AT,NL,DE,HU,CY,LU,CH,BE,FR,DK,FI,RO,HR,UA,MT,SI,GI,PT,IE,CZ,EE,LT,LV,IT,PL,IS,CA,US", currency = "AUD,BRL,CAD,CZK,DKK,EUR,HKD,HUF,INR,JPY,MYR,MXN,NZD,NOK,PHP,PLN,RUB,GBP,SGD,SEK,CHF,THB,USD" }
mobile_pay = { country = "DK,FI", currency = "DKK,SEK,NOK,EUR" }
-ali_pay = { country = "AU,N,JP,HK,SG,MY,TH,ES,UK,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD" }
-we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,UK,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD" }
+ali_pay = { country = "AU,JP,HK,SG,MY,TH,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,FI,RO,MT,SI,GR,PT,IE,IT,CA,US", currency = "USD,EUR,GBP,JPY,AUD,SGD,CHF,SEK,NOK,NZD,THB,HKD,CAD" }
+we_chat_pay = { country = "AU,NZ,CN,JP,HK,SG,ES,GB,SE,NO,AT,NL,DE,CY,CH,BE,FR,DK,LI,MT,SI,GR,PT,IT,CA,US", currency = "AUD,CAD,CNY,EUR,GBP,HKD,JPY,NZD,SGD,USD" }
mb_way = { country = "PT", currency = "EUR" }
-klarna = { country = "AT,ES,UK,SE,NO,AT,NL,DE,CH,BE,FR,DK,FI,PT,IE,IT,PL,CA,US", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
+klarna = { country = "AT,ES,GB,SE,NO,AT,NL,DE,CH,BE,FR,DK,FI,PT,IE,IT,PL,CA,US", currency = "USD,GBP,EUR,CHF,DKK,SEK,NOK,AUD,PLN,CAD" }
affirm = { country = "US", currency = "USD" }
-afterpay_clearpay = { country = "AU,NZ,ES,UK,FR,IT,CA,US", currency = "GBP" }
+afterpay_clearpay = { country = "AU,NZ,ES,GB,FR,IT,CA,US", currency = "GBP" }
pay_bright = { country = "CA", currency = "CAD" }
walley = { country = "SE,NO,DK,FI", currency = "DKK,EUR,NOK,SEK" }
giropay = { country = "DE", currency = "EUR" }
eps = { country = "AT", currency = "EUR" }
-sofort = { country = "ES,UK,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
+sofort = { country = "ES,GB,SE,AT,NL,DE,CH,BE,FR,FI,IT,PL", currency = "EUR" }
ideal = { country = "NL", currency = "EUR" }
blik = {country = "PL", currency = "PLN"}
-trustly = {country = "ES,UK,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK"}
+trustly = {country = "ES,GB,SE,NO,AT,NL,DE,DK,FI,EE,LT,LV", currency = "CZK,DKK,EUR,GBP,NOK,SEK"}
online_banking_czech_republic = {country = "CZ", currency = "EUR,CZK"}
online_banking_finland = {country = "FI", currency = "EUR"}
online_banking_poland = {country = "PL", currency = "PLN"}
online_banking_slovakia = {country = "SK", currency = "EUR,CZK"}
bancontact_card = {country = "BE", currency = "EUR"}
ach = {country = "US", currency = "USD"}
-bacs = {country = "UK", currency = "GBP"}
+bacs = {country = "GB", currency = "GBP"}
sepa = {country = "ES,SK,AT,NL,DE,BE,FR,FI,PT,IE,EE,LT,LV,IT", currency = "EUR"}
ali_pay_hk = {country = "HK", currency = "HKD"}
bizum = {country = "ES", currency = "EUR"}
@@ -308,19 +346,24 @@ alfamart = {country = "ID", currency = "IDR"}
indomaret = {country = "ID", currency = "IDR"}
open_banking_uk = {country = "GB", currency = "GBP"}
oxxo = {country = "MX", currency = "MXN"}
-pay_safe_card = {country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,UAE,UK,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,ISK,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU"}
+pay_safe_card = {country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,AE,GB,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU"}
seven_eleven = {country = "JP", currency = "JPY"}
lawson = {country = "JP", currency = "JPY"}
mini_stop = {country = "JP", currency = "JPY"}
family_mart = {country = "JP", currency = "JPY"}
seicomart = {country = "JP", currency = "JPY"}
pay_easy = {country = "JP", currency = "JPY"}
+pix = { country = "BR", currency = "BRL" }
[pm_filters.braintree]
paypal = { currency = "AUD,BRL,CAD,CNY,CZK,DKK,EUR,HKD,HUF,ILS,JPY,MYR,MXN,TWD,NZD,NOK,PHP,PLN,GBP,RUB,SGD,SEK,CHF,THB,USD" }
credit = { not_available_flows = { capture_method = "manual" } }
debit = { not_available_flows = { capture_method = "manual" } }
+[pm_filters.helcim]
+credit = { currency = "USD" }
+debit = { currency = "USD" }
+
[pm_filters.klarna]
klarna = { country = "AU,AT,BE,CA,CZ,DK,FI,FR,DE,GR,IE,IT,NL,NZ,NO,PL,PT,ES,SE,CH,GB,US", currency = "AUD,EUR,EUR,CAD,CZK,DKK,EUR,EUR,EUR,EUR,EUR,EUR,EUR,NZD,NOK,PLN,EUR,EUR,SEK,CHF,GBP,USD" }
credit = { not_available_flows = { capture_method = "manual" } }
@@ -380,7 +423,7 @@ debit = { currency = "USD" }
[tokenization]
stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { type = "disable_only", list = "google_pay" } }
-checkout = { long_lived_token = false, payment_method = "wallet" }
+checkout = { long_lived_token = false, payment_method = "wallet", apple_pay_pre_decrypt_flow = "network_tokenization" }
stax = { long_lived_token = true, payment_method = "card,bank_debit" }
mollie = {long_lived_token = false, payment_method = "card"}
square = {long_lived_token = false, payment_method = "card"}
@@ -393,6 +436,10 @@ stripe = {payment_method = "bank_transfer"}
nuvei = {payment_method = "card"}
shift4 = {payment_method = "card"}
bluesnap = {payment_method = "card"}
+bankofamerica = {payment_method = "card"}
+cybersource = {payment_method = "card"}
+nmi = {payment_method = "card"}
+payme = {payment_method = "card"}
[connector_customer]
connector_list = "gocardless,stax,stripe"
@@ -426,14 +473,16 @@ connectors_with_webhook_source_verification_call = "paypal"
[mandates.supported_payment_methods]
pay_later.klarna = { connector_list = "adyen" }
-wallet.google_pay = { connector_list = "stripe,adyen" }
-wallet.apple_pay = { connector_list = "stripe,adyen" }
+wallet.google_pay = { connector_list = "stripe,adyen,cybersource" }
+wallet.apple_pay = { connector_list = "stripe,adyen,cybersource,noon" }
wallet.paypal = { connector_list = "adyen" }
-card.credit = { connector_list = "stripe,adyen,authorizedotnet,globalpay,worldpay,multisafepay,nmi,nexinets,noon" }
-card.debit = { connector_list = "stripe,adyen,authorizedotnet,globalpay,worldpay,multisafepay,nmi,nexinets,noon" }
+card.credit = { connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon" }
+card.debit = { connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon" }
bank_debit.ach = { connector_list = "gocardless"}
bank_debit.becs = { connector_list = "gocardless"}
bank_debit.sepa = { connector_list = "gocardless"}
+bank_redirect.ideal = {connector_list = "stripe,adyen,globalpay"}
+bank_redirect.sofort = {connector_list = "stripe,adyen,globalpay"}
[connector_request_reference_id_config]
merchant_ids_send_payment_id_as_connector_request_id = []
@@ -451,7 +500,11 @@ apple_pay_merchant_cert = "APPLE_PAY_MERCHNAT_CERTIFICATE"
apple_pay_merchant_cert_key = "APPLE_PAY_MERCHNAT_CERTIFICATE_KEY"
[payment_link]
-sdk_url = "http://localhost:9090/dist/HyperLoader.js"
+sdk_url = "http://localhost:9050/HyperLoader.js"
+
+[payment_method_auth]
+redis_expiry = 900
+pm_auth_key = "Some_pm_auth_key"
[lock_settings]
redis_lock_expiry_seconds = 180 # 3 * 60 seconds
@@ -459,3 +512,46 @@ delay_between_retries_in_milliseconds = 500
[kv_config]
ttl = 900 # 15 * 60 seconds
+
+[frm]
+enabled = true
+
+[events]
+source = "logs"
+
+[events.kafka]
+brokers = ["localhost:9092"]
+intent_analytics_topic = "hyperswitch-payment-intent-events"
+attempt_analytics_topic = "hyperswitch-payment-attempt-events"
+refund_analytics_topic = "hyperswitch-refund-events"
+api_logs_topic = "hyperswitch-api-log-events"
+connector_logs_topic = "hyperswitch-connector-api-events"
+outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events"
+
+[analytics]
+source = "sqlx"
+
+[analytics.clickhouse]
+username = "default"
+# password = ""
+host = "http://localhost:8123"
+database_name = "default"
+
+[analytics.sqlx]
+username = "db_user"
+password = "db_pass"
+host = "localhost"
+port = 5432
+dbname = "hyperswitch_db"
+pool_size = 5
+connection_timeout = 10
+queue_strategy = "Fifo"
+
+[connector_onboarding.paypal]
+client_id = ""
+client_secret = ""
+partner_id = ""
+enabled = true
+
+[file_storage]
+file_storage_backend = "file_system"
diff --git a/config/docker_compose.toml b/config/docker_compose.toml
index a5294546de41..e6dc01afa741 100644
--- a/config/docker_compose.toml
+++ b/config/docker_compose.toml
@@ -15,7 +15,7 @@ level = "DEBUG" # What you see in your terminal.
[log.telemetry]
traces_enabled = false # Whether traces are enabled.
-metrics_enabled = false # Whether metrics are enabled.
+metrics_enabled = true # Whether metrics are enabled.
ignore_errors = false # Whether to ignore errors during traces or metrics pipeline setup.
otel_exporter_otlp_endpoint = "https://otel-collector:4317" # Endpoint to send metrics and traces to.
use_xray_generator = false
@@ -28,6 +28,15 @@ port = 5432
dbname = "hyperswitch_db"
pool_size = 5
+[forex_api]
+call_delay = 21600
+local_fetch_retry_count = 5
+local_fetch_retry_delay = 1000
+api_timeout = 20000
+api_key = "YOUR API KEY HERE"
+fallback_api_key = "YOUR API KEY HERE"
+redis_lock_timeout = 26000
+
[replica_database]
username = "db_user"
password = "db_pass"
@@ -47,14 +56,9 @@ host = ""
host_rs = ""
mock_locker = true
basilisk_host = ""
+locker_enabled = true
[jwekey]
-locker_key_identifier1 = ""
-locker_key_identifier2 = ""
-locker_encryption_key1 = ""
-locker_encryption_key2 = ""
-locker_decryption_key1 = ""
-locker_decryption_key2 = ""
vault_encryption_key = ""
rust_locker_encryption_key = ""
vault_private_key = ""
@@ -64,6 +68,19 @@ host = "redis-standalone"
port = 6379
cluster_enabled = false
cluster_urls = ["redis-cluster:6379"]
+pool_size = 5
+reconnect_max_attempts = 5
+reconnect_delay = 5
+default_ttl = 300
+default_hash_ttl = 900
+use_legacy_version = false
+stream_read_count = 1
+auto_pipeline = true
+disable_auto_backpressure = false
+max_in_flight_commands = 5000
+default_command_timeout = 0
+max_feed_count = 200
+
[refund]
max_attempts = 10
@@ -116,10 +133,13 @@ payeezy.base_url = "https://api-cert.payeezy.com/"
payme.base_url = "https://sandbox.payme.io/"
paypal.base_url = "https://api-m.sandbox.paypal.com/"
payu.base_url = "https://secure.snd.payu.com/"
+placetopay.base_url = "https://test.placetopay.com/rest/gateway"
powertranz.base_url = "https://staging.ptranz.com/api/"
prophetpay.base_url = "https://ccm-thirdparty.cps.golf/"
rapyd.base_url = "https://sandboxapi.rapyd.net"
+riskified.base_url = "https://sandbox.riskified.com/api"
shift4.base_url = "https://api.shift4.com/"
+signifyd.base_url = "https://api.signifyd.com/"
square.base_url = "https://connect.squareupsandbox.com/"
square.secondary_base_url = "https://pci-connect.squareupsandbox.com/"
stax.base_url = "https://apiprod.fattlabs.com/"
@@ -178,6 +198,7 @@ cards = [
"payme",
"paypal",
"payu",
+ "placetopay",
"powertranz",
"prophetpay",
"shift4",
@@ -206,10 +227,15 @@ stream = "SCHEDULER_STREAM"
disabled = false
consumer_group = "SCHEDULER_GROUP"
+[scheduler.server]
+port = 3000
+host = "127.0.0.1"
+workers = 1
+
#tokenization configuration which describe token lifetime and payment method for specific connector
[tokenization]
stripe = { long_lived_token = false, payment_method = "wallet", payment_method_type = { type = "disable_only", list = "google_pay" } }
-checkout = { long_lived_token = false, payment_method = "wallet" }
+checkout = { long_lived_token = false, payment_method = "wallet", apple_pay_pre_decrypt_flow = "network_tokenization" }
mollie = {long_lived_token = false, payment_method = "card"}
stax = { long_lived_token = true, payment_method = "card,bank_debit" }
square = {long_lived_token = false, payment_method = "card"}
@@ -221,6 +247,10 @@ stripe = {payment_method = "bank_transfer"}
nuvei = {payment_method = "card"}
shift4 = {payment_method = "card"}
bluesnap = {payment_method = "card"}
+bankofamerica = {payment_method = "card"}
+cybersource = {payment_method = "card"}
+nmi = {payment_method = "card"}
+payme = {payment_method = "card"}
[dummy_connector]
enabled = true
@@ -262,7 +292,7 @@ alfamart = {country = "ID", currency = "IDR"}
indomaret = {country = "ID", currency = "IDR"}
open_banking_uk = {country = "GB", currency = "GBP"}
oxxo = {country = "MX", currency = "MXN"}
-pay_safe_card = {country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,UAE,UK,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,ISK,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU"}
+pay_safe_card = {country = "AT,AU,BE,BR,BE,CA,HR,CY,CZ,DK,FI,FR,GE,DE,GI,HU,IS,IE,KW,LV,IE,LI,LT,LU,MT,MX,MD,ME,NL,NZ,NO,PY,PE,PL,PT,RO,SA,RS,SK,SI,ES,SE,CH,TR,AE,GB,US,UY", currency = "EUR,AUD,BRL,CAD,CZK,DKK,GEL,GIP,HUF,KWD,CHF,MXN,MDL,NZD,NOK,PYG,PEN,PLN,RON,SAR,RSD,SEK,TRY,AED,GBP,USD,UYU"}
seven_eleven = {country = "JP", currency = "JPY"}
lawson = {country = "JP", currency = "JPY"}
mini_stop = {country = "JP", currency = "JPY"}
@@ -288,13 +318,17 @@ cashapp = {country = "US", currency = "USD"}
[pm_filters.prophetpay]
card_redirect = { currency = "USD" }
+[pm_filters.helcim]
+credit = { currency = "USD" }
+debit = { currency = "USD" }
+
[pm_filters.stax]
credit = { currency = "USD" }
debit = { currency = "USD" }
ach = { currency = "USD" }
[bank_config.online_banking_fpx]
-adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,may_bank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
+adyen.banks = "affin_bank,agro_bank,alliance_bank,am_bank,bank_islam,bank_muamalat,bank_rakyat,bank_simpanan_nasional,cimb_bank,hong_leong_bank,hsbc_bank,kuwait_finance_house,maybank,ocbc_bank,public_bank,rhb_bank,standard_chartered_bank,uob_bank"
[bank_config.online_banking_thailand]
adyen.banks = "bangkok_bank,krungsri_bank,krung_thai_bank,the_siam_commercial_bank,kasikorn_bank"
@@ -305,13 +339,15 @@ adyen = { banks = "aib,bank_of_scotland,danske_bank,first_direct,first_trust,hal
[mandates.supported_payment_methods]
pay_later.klarna = {connector_list = "adyen"}
wallet.google_pay = {connector_list = "stripe,adyen"}
-wallet.apple_pay = {connector_list = "stripe,adyen"}
+wallet.apple_pay = {connector_list = "stripe,adyen,cybersource,noon"}
wallet.paypal = {connector_list = "adyen"}
-card.credit = {connector_list = "stripe,adyen,authorizedotnet,globalpay,worldpay,multisafepay,nmi,nexinets,noon"}
-card.debit = {connector_list = "stripe,adyen,authorizedotnet,globalpay,worldpay,multisafepay,nmi,nexinets,noon"}
+card.credit = {connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"}
+card.debit = {connector_list = "stripe,adyen,authorizedotnet,cybersource,globalpay,worldpay,multisafepay,nmi,nexinets,noon"}
bank_debit.ach = { connector_list = "gocardless"}
bank_debit.becs = { connector_list = "gocardless"}
bank_debit.sepa = { connector_list = "gocardless"}
+bank_redirect.ideal = {connector_list = "stripe,adyen,globalpay"}
+bank_redirect.sofort = {connector_list = "stripe,adyen,globalpay"}
[connector_customer]
connector_list = "gocardless,stax,stripe"
@@ -320,20 +356,56 @@ payout_connector_list = "wise"
[multiple_api_version_supported_connectors]
supported_connectors = "braintree"
+[payment_method_auth]
+redis_expiry = 900
+pm_auth_key = "Some_pm_auth_key"
+
[lock_settings]
redis_lock_expiry_seconds = 180 # 3 * 60 seconds
delay_between_retries_in_milliseconds = 500
+[events.kafka]
+brokers = ["localhost:9092"]
+intent_analytics_topic = "hyperswitch-payment-intent-events"
+attempt_analytics_topic = "hyperswitch-payment-attempt-events"
+refund_analytics_topic = "hyperswitch-refund-events"
+api_logs_topic = "hyperswitch-api-log-events"
+connector_logs_topic = "hyperswitch-connector-api-events"
+outgoing_webhook_logs_topic = "hyperswitch-outgoing-webhook-events"
+
[analytics]
source = "sqlx"
+[analytics.clickhouse]
+username = "default"
+# password = ""
+host = "http://localhost:8123"
+database_name = "default"
+
[analytics.sqlx]
username = "db_user"
password = "db_pass"
-host = "pg"
+host = "localhost"
port = 5432
dbname = "hyperswitch_db"
pool_size = 5
+connection_timeout = 10
+queue_strategy = "Fifo"
[kv_config]
ttl = 900 # 15 * 60 seconds
+
+[frm]
+enabled = true
+
+[connector_onboarding.paypal]
+client_id = ""
+client_secret = ""
+partner_id = ""
+enabled = true
+
+[events]
+source = "logs"
+
+[file_storage]
+file_storage_backend = "file_system"
diff --git a/connector-template/mod.rs b/connector-template/mod.rs
index 7f21962109de..c64ce431968c 100644
--- a/connector-template/mod.rs
+++ b/connector-template/mod.rs
@@ -15,6 +15,7 @@ use crate::{
self,
api::{self, ConnectorCommon, ConnectorCommonExt},
ErrorResponse, Response,
+ RequestContent
}
};
@@ -106,6 +107,7 @@ impl ConnectorCommon for {{project-name | downcase | pascal_case}} {
message: response.message,
reason: response.reason,
attempt_status: None,
+ connector_transaction_id: None,
})
}
}
@@ -157,7 +159,7 @@ impl
Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
}
- fn get_request_body(&self, req: &types::PaymentsAuthorizeRouterData, _connectors: &settings::Connectors,) -> CustomResult, errors::ConnectorError> {
+ fn get_request_body(&self, req: &types::PaymentsAuthorizeRouterData, _connectors: &settings::Connectors,) -> CustomResult {
let connector_router_data =
{{project-name | downcase}}::{{project-name | downcase | pascal_case}}RouterData::try_from((
&self.get_currency_unit(),
@@ -165,10 +167,8 @@ impl
req.request.amount,
req,
))?;
- let req_obj = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsRequest::try_from(&connector_router_data)?;
- let {{project-name | downcase}}_req = types::RequestBody::log_and_get_request_body(&req_obj, utils::Encode::<{{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsRequest>::encode_to_string_of_json)
- .change_context(errors::ConnectorError::RequestEncodingFailed)?;
- Ok(Some({{project-name | downcase}}_req))
+ let connector_req = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsRequest::try_from(&connector_router_data)?;
+ Ok(RequestContent::Json(Box::new(connector_req)))
}
fn build_request(
@@ -186,7 +186,7 @@ impl
.headers(types::PaymentsAuthorizeType::get_headers(
self, req, connectors,
)?)
- .body(types::PaymentsAuthorizeType::get_request_body(self, req, connectors)?)
+ .set_body(types::PaymentsAuthorizeType::get_request_body(self, req, connectors)?)
.build(),
))
}
@@ -303,7 +303,7 @@ impl
&self,
_req: &types::PaymentsCaptureRouterData,
_connectors: &settings::Connectors,
- ) -> CustomResult, errors::ConnectorError> {
+ ) -> CustomResult {
Err(errors::ConnectorError::NotImplemented("get_request_body method".to_string()).into())
}
@@ -320,7 +320,7 @@ impl
.headers(types::PaymentsCaptureType::get_headers(
self, req, connectors,
)?)
- .body(types::PaymentsCaptureType::get_request_body(self, req, connectors)?)
+ .set_body(types::PaymentsCaptureType::get_request_body(self, req, connectors)?)
.build(),
))
}
@@ -375,7 +375,7 @@ impl
Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
}
- fn get_request_body(&self, req: &types::RefundsRouterData, _connectors: &settings::Connectors,) -> CustomResult, errors::ConnectorError> {
+ fn get_request_body(&self, req: &types::RefundsRouterData, _connectors: &settings::Connectors,) -> CustomResult {
let connector_router_data =
{{project-name | downcase}}::{{project-name | downcase | pascal_case}}RouterData::try_from((
&self.get_currency_unit(),
@@ -383,10 +383,8 @@ impl
req.request.refund_amount,
req,
))?;
- let req_obj = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RefundRequest::try_from(&connector_router_data)?;
- let {{project-name | downcase}}_req = types::RequestBody::log_and_get_request_body(&req_obj, utils::Encode::<{{project-name | downcase}}::{{project-name | downcase | pascal_case}}RefundRequest>::encode_to_string_of_json)
- .change_context(errors::ConnectorError::RequestEncodingFailed)?;
- Ok(Some({{project-name | downcase}}_req))
+ let connector_req = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RefundRequest::try_from(&connector_router_data)?;
+ Ok(RequestContent::Json(Box::new(connector_req)))
}
fn build_request(&self, req: &types::RefundsRouterData, connectors: &settings::Connectors,) -> CustomResult,errors::ConnectorError> {
@@ -395,7 +393,7 @@ impl
.url(&types::RefundExecuteType::get_url(self, req, connectors)?)
.attach_default_headers()
.headers(types::RefundExecuteType::get_headers(self, req, connectors)?)
- .body(types::RefundExecuteType::get_request_body(self, req, connectors)?)
+ .set_body(types::RefundExecuteType::get_request_body(self, req, connectors)?)
.build();
Ok(Some(request))
}
@@ -443,7 +441,7 @@ impl
.url(&types::RefundSyncType::get_url(self, req, connectors)?)
.attach_default_headers()
.headers(types::RefundSyncType::get_headers(self, req, connectors)?)
- .body(types::RefundSyncType::get_request_body(self, req, connectors)?)
+ .set_body(types::RefundSyncType::get_request_body(self, req, connectors)?)
.build(),
))
}
@@ -485,7 +483,7 @@ impl api::IncomingWebhook for {{project-name | downcase | pascal_case}} {
fn get_webhook_resource_object(
&self,
_request: &api::IncomingWebhookRequestDetails<'_>,
- ) -> CustomResult {
+ ) -> CustomResult, errors::ConnectorError> {
Err(errors::ConnectorError::WebhooksNotImplemented).into_report()
}
}
diff --git a/connector-template/test.rs b/connector-template/test.rs
index 5bbf761dea19..7b093ddb6efa 100644
--- a/connector-template/test.rs
+++ b/connector-template/test.rs
@@ -17,6 +17,7 @@ impl utils::Connector for {{project-name | downcase | pascal_case}}Test {
connector: Box::new(&{{project-name | downcase | pascal_case}}),
connector_name: types::Connector::{{project-name | downcase | pascal_case}},
get_token: types::api::GetToken::Connector,
+ merchant_connector_id: None,
}
}
diff --git a/connector-template/transformers.rs b/connector-template/transformers.rs
index 3ed53a906a2e..60b13693054d 100644
--- a/connector-template/transformers.rs
+++ b/connector-template/transformers.rs
@@ -42,7 +42,6 @@ pub struct {{project-name | downcase | pascal_case}}PaymentsRequest {
#[derive(Default, Debug, Serialize, Eq, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}Card {
- name: Secret,
number: cards::CardNumber,
expiry_month: Secret,
expiry_year: Secret,
@@ -56,7 +55,6 @@ impl TryFrom<&{{project-name | downcase | pascal_case}}RouterData<&types::Paymen
match item.router_data.request.payment_method_data.clone() {
api::PaymentMethodData::Card(req_card) => {
let card = {{project-name | downcase | pascal_case}}Card {
- name: req_card.card_holder_name,
number: req_card.card_number,
expiry_month: req_card.card_exp_month,
expiry_year: req_card.card_exp_year,
@@ -130,6 +128,7 @@ impl TryFrom kafka-ui is a visual tool for inspecting kafka on localhost:8090
+
+#### Setting up Clickhouse
+
+Once clickhouse is up & running you need to create the required tables for it
+
+You can either visit the URL (http://localhost:8123/play) on which the clickhouse-server is running to get a query playground,
+Alternatively you can bash into the clickhouse container & execute commands manually
+```
+# On your local terminal
+docker compose exec clickhouse-server bash
+
+# Inside the clickhouse-server container shell
+clickhouse-client --user default
+
+# Inside the clickhouse-client shell
+SHOW TABLES;
+CREATE TABLE ......
+```
+
+The table creation scripts are provided [here](./scripts)
+
+#### Running/Debugging your application
+Once setup you can run your application either via docker compose or normally via cargo run
+
+Remember to enable the kafka_events via development.toml/docker_compose.toml files
+
+Inspect the [kafka-ui](http://localhost:8090) to check the messages being inserted in queue
+
+If the messages/topic are available then you can run select queries on your clickhouse table to ensure data is being populated...
+
+If the data is not being populated in clickhouse, you can check the error logs in clickhouse server via
+```
+# Inside the clickhouse-server container shell
+tail -f /var/log/clickhouse-server/clickhouse-server.err.log
+```
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/README.md b/crates/analytics/docs/clickhouse/cluster_setup/README.md
new file mode 100644
index 000000000000..cd5f2dfeb023
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/README.md
@@ -0,0 +1,347 @@
+# Tutorial for setting up a ClickHouse server
+
+
+## Single server with docker
+
+
+- Run server
+
+```
+docker run -d --name clickhouse-server -p 9000:9000 --ulimit nofile=262144:262144 yandex/clickhouse-server
+
+```
+
+- Run client
+
+```
+docker run -it --rm --link clickhouse-server:clickhouse-server yandex/clickhouse-client --host clickhouse-server
+```
+
+Now you can check whether the setup succeeded.
+
+
+## Setup Cluster
+
+
+In this part we will set up
+
+- 1 cluster, with 3 shards
+- Each shard has 2 replica server
+- Use ReplicatedMergeTree & Distributed table to setup our table.
+
+
+### Cluster
+
+Let's see our docker-compose.yml first.
+
+```
+version: '3'
+
+services:
+ clickhouse-zookeeper:
+ image: zookeeper
+ ports:
+ - "2181:2181"
+ - "2182:2182"
+ container_name: clickhouse-zookeeper
+ hostname: clickhouse-zookeeper
+
+ clickhouse-01:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-01
+ container_name: clickhouse-01
+ ports:
+ - 9001:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-01.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-01:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-02:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-02
+ container_name: clickhouse-02
+ ports:
+ - 9002:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-02.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-02:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-03:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-03
+ container_name: clickhouse-03
+ ports:
+ - 9003:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-03.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-03:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-04:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-04
+ container_name: clickhouse-04
+ ports:
+ - 9004:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-04.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-04:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-05:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-05
+ container_name: clickhouse-05
+ ports:
+ - 9005:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-05.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-05:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-06:
+ image: yandex/clickhouse-server
+ hostname: clickhouse-06
+ container_name: clickhouse-06
+ ports:
+ - 9006:9000
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-06.xml:/etc/clickhouse-server/config.d/macros.xml
+ # - ./data/server-06:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+networks:
+ default:
+ external:
+ name: clickhouse-net
+```
+
+
+We have 6 clickhouse server container and one zookeeper container.
+
+
+**To enable replication ZooKeeper is required. ClickHouse will take care of data consistency on all replicas and run restore procedure after failure automatically. It's recommended to deploy ZooKeeper cluster to separate servers.**
+
+**ZooKeeper is not a requirement — in some simple cases you can duplicate the data by writing it into all the replicas from your application code. This approach is not recommended — in this case ClickHouse is not able to guarantee data consistency on all replicas. This remains the responsibility of your application.**
+
+
+Let's see config file.
+
+`./config/clickhouse_config.xml` is the default config file in docker, we copy it out and add this line
+
+```
+
+ /etc/clickhouse-server/metrika.xml
+```
+
+
+So lets see `clickhouse_metrika.xml`
+
+```
+
+
+
+
+ 1
+ true
+
+ clickhouse-01
+ 9000
+
+
+ clickhouse-06
+ 9000
+
+
+
+ 1
+ true
+
+ clickhouse-02
+ 9000
+
+
+ clickhouse-03
+ 9000
+
+
+
+ 1
+ true
+
+
+ clickhouse-04
+ 9000
+
+
+ clickhouse-05
+ 9000
+
+
+
+
+
+
+ clickhouse-zookeeper
+ 2181
+
+
+
+ ::/0
+
+
+
+ 10000000000
+ 0.01
+ lz4
+
+
+
+```
+
+and macros.xml; each instance has its own macros settings, e.g. server 1:
+
+```
+
+
+ clickhouse-01
+ 01
+ 01
+
+
+```
+
+
+**Make sure your macros settings is equal to remote server settings in metrika.xml**
+
+So now you can start the server.
+
+```
+docker network create clickhouse-net
+docker-compose up -d
+```
+
+Connect to the server and check whether the cluster settings are fine:
+
+```
+docker run -it --rm --network="clickhouse-net" --link clickhouse-01:clickhouse-server yandex/clickhouse-client --host clickhouse-server
+```
+
+```sql
+clickhouse-01 :) select * from system.clusters;
+
+SELECT *
+FROM system.clusters
+
+┌─cluster─────────────────────┬─shard_num─┬─shard_weight─┬─replica_num─┬─host_name─────┬─host_address─┬─port─┬─is_local─┬─user────┬─default_database─┐
+│ cluster_1 │ 1 │ 1 │ 1 │ clickhouse-01 │ 172.21.0.4 │ 9000 │ 1 │ default │ │
+│ cluster_1 │ 1 │ 1 │ 2 │ clickhouse-06 │ 172.21.0.5 │ 9000 │ 1 │ default │ │
+│ cluster_1 │ 2 │ 1 │ 1 │ clickhouse-02 │ 172.21.0.8 │ 9000 │ 0 │ default │ │
+│ cluster_1 │ 2 │ 1 │ 2 │ clickhouse-03 │ 172.21.0.6 │ 9000 │ 0 │ default │ │
+│ cluster_1 │ 3 │ 1 │ 1 │ clickhouse-04 │ 172.21.0.7 │ 9000 │ 0 │ default │ │
+│ cluster_1 │ 3 │ 1 │ 2 │ clickhouse-05 │ 172.21.0.3 │ 9000 │ 0 │ default │ │
+│ test_shard_localhost │ 1 │ 1 │ 1 │ localhost │ 127.0.0.1 │ 9000 │ 1 │ default │ │
+│ test_shard_localhost_secure │ 1 │ 1 │ 1 │ localhost │ 127.0.0.1 │ 9440 │ 0 │ default │ │
+└─────────────────────────────┴───────────┴──────────────┴─────────────┴───────────────┴──────────────┴──────┴──────────┴─────────┴──────────────────┘
+```
+
+If you see this, it means the cluster settings work (though connectivity itself is not yet verified).
+
+
+### Replica Table
+
+So now we have a cluster and replica settings. For clickhouse, we need to create ReplicatedMergeTree Table as a local table in every server.
+
+```sql
+CREATE TABLE ttt (id Int32) ENGINE = ReplicatedMergeTree('/clickhouse/tables/{layer}-{shard}/ttt', '{replica}') PARTITION BY id ORDER BY id
+```
+
+and Create Distributed Table conn to local table
+
+```sql
+CREATE TABLE ttt_all as ttt ENGINE = Distributed(cluster_1, default, ttt, rand());
+```
+
+
+### Insert and test
+
+Generate some data and test.
+
+
+```
+# docker exec into client server 1 and
+for ((idx=1;idx<=100;++idx)); do clickhouse-client --host clickhouse-server --query "Insert into default.ttt_all values ($idx)"; done;
+```
+
+For Distributed table.
+
+```
+select count(*) from ttt_all;
+```
+
+For the local table.
+
+```
+select count(*) from ttt;
+```
+
+
+## Authentication
+
+Please see config/users.xml
+
+
+- Conn
+```bash
+docker run -it --rm --network="clickhouse-net" --link clickhouse-01:clickhouse-server yandex/clickhouse-client --host clickhouse-server -u user1 --password 123456
+```
+
+## Source
+
+- https://clickhouse.yandex/docs/en/operations/table_engines/replication/#creating-replicated-tables
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml
new file mode 100644
index 000000000000..94c854dc273a
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_config.xml
@@ -0,0 +1,370 @@
+
+
+
+
+ error
+ 1000M
+ 1
+ 10
+
+
+
+ 8123
+ 9000
+
+
+
+
+
+
+
+
+ /etc/clickhouse-server/server.crt
+ /etc/clickhouse-server/server.key
+
+ /etc/clickhouse-server/dhparam.pem
+ none
+ true
+ true
+ sslv2,sslv3
+ true
+
+
+
+ true
+ true
+ sslv2,sslv3
+ true
+
+
+
+ RejectCertificateHandler
+
+
+
+
+
+
+
+
+ 9009
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4096
+ 3
+
+
+ 100
+
+
+
+
+
+ 8589934592
+
+
+ 5368709120
+
+
+
+ /var/lib/clickhouse/
+
+
+ /var/lib/clickhouse/tmp/
+
+
+ /var/lib/clickhouse/user_files/
+
+
+ users.xml
+
+
+ default
+
+
+
+
+
+ default
+
+
+
+
+
+
+
+
+
+
+
+
+
+ localhost
+ 9000
+
+
+
+
+
+
+ localhost
+ 9440
+ 1
+
+
+
+
+
+
+
+ /etc/clickhouse-server/metrika.xml
+
+
+
+
+
+
+
+
+ 3600
+
+
+
+ 3600
+
+
+ 60
+
+
+
+
+
+
+
+
+
+ system
+
+
+ toYYYYMM(event_date)
+
+ 7500
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ *_dictionary.xml
+
+
+
+
+
+
+
+
+
+ /clickhouse/task_queue/ddl
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ click_cost
+ any
+
+ 0
+ 3600
+
+
+ 86400
+ 60
+
+
+
+ max
+
+ 0
+ 60
+
+
+ 3600
+ 300
+
+
+ 86400
+ 3600
+
+
+
+
+
+ /var/lib/clickhouse/format_schemas/
+
+
+
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml
new file mode 100644
index 000000000000..b58ffc34bc29
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/clickhouse_metrika.xml
@@ -0,0 +1,60 @@
+
+
+
+
+ 1
+ true
+
+ clickhouse-01
+ 9000
+
+
+ clickhouse-06
+ 9000
+
+
+
+ 1
+ true
+
+ clickhouse-02
+ 9000
+
+
+ clickhouse-03
+ 9000
+
+
+
+ 1
+ true
+
+
+ clickhouse-04
+ 9000
+
+
+ clickhouse-05
+ 9000
+
+
+
+
+
+
+ clickhouse-zookeeper
+ 2181
+
+
+
+ ::/0
+
+
+
+ 10000000000
+ 0.01
+ lz4
+
+
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml
new file mode 100644
index 000000000000..75df1c5916e8
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-01.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-01
+ 01
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml
new file mode 100644
index 000000000000..67e4a545b30c
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-02.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-02
+ 02
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml
new file mode 100644
index 000000000000..e9278191b80f
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-03.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-03
+ 02
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml
new file mode 100644
index 000000000000..033c0ad1152e
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-04.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-04
+ 03
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml
new file mode 100644
index 000000000000..c63314c5acea
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-05.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-05
+ 03
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml
new file mode 100644
index 000000000000..4b01bda9948c
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/macros/macros-06.xml
@@ -0,0 +1,9 @@
+
+
+ clickhouse-06
+ 01
+ 01
+ data
+ cluster_1
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml b/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml
new file mode 100644
index 000000000000..e1b8de78e37a
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/config/users.xml
@@ -0,0 +1,117 @@
+
+
+
+
+
+
+
+ 10000000000
+
+
+ 0
+
+
+ random
+
+
+
+
+ 1
+
+
+
+
+
+
+ 123456
+
+ ::/0
+
+ default
+ default
+
+
+
+
+
+
+
+
+ ::/0
+
+
+
+ default
+
+
+ default
+
+
+
+
+
+
+ ::1
+ 127.0.0.1
+
+ readonly
+ default
+
+
+
+
+
+
+
+
+
+
+ 3600
+
+
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml b/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml
new file mode 100644
index 000000000000..96d7618b47e6
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/docker-compose.yml
@@ -0,0 +1,198 @@
+version: '3'
+
+networks:
+ ckh_net:
+
+services:
+ clickhouse-zookeeper:
+ image: zookeeper
+ ports:
+ - "2181:2181"
+ - "2182:2182"
+ container_name: clickhouse-zookeeper
+ hostname: clickhouse-zookeeper
+ networks:
+ - ckh_net
+
+ clickhouse-01:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-01
+ container_name: clickhouse-01
+ networks:
+ - ckh_net
+ ports:
+ - 9001:9000
+ - 8124:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-01.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-01:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-02:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-02
+ container_name: clickhouse-02
+ networks:
+ - ckh_net
+ ports:
+ - 9002:9000
+ - 8125:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-02.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-02:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-03:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-03
+ container_name: clickhouse-03
+ networks:
+ - ckh_net
+ ports:
+ - 9003:9000
+ - 8126:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-03.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-03:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-04:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-04
+ container_name: clickhouse-04
+ networks:
+ - ckh_net
+ ports:
+ - 9004:9000
+ - 8127:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-04.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-04:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-05:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-05
+ container_name: clickhouse-05
+ networks:
+ - ckh_net
+ ports:
+ - 9005:9000
+ - 8128:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-05.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-05:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ clickhouse-06:
+ image: clickhouse/clickhouse-server
+ hostname: clickhouse-06
+ container_name: clickhouse-06
+ networks:
+ - ckh_net
+ ports:
+ - 9006:9000
+ - 8129:8123
+ volumes:
+ - ./config/clickhouse_config.xml:/etc/clickhouse-server/config.xml
+ - ./config/clickhouse_metrika.xml:/etc/clickhouse-server/metrika.xml
+ - ./config/macros/macros-06.xml:/etc/clickhouse-server/config.d/macros.xml
+ - ./config/users.xml:/etc/clickhouse-server/users.xml
+ # - ./data/server-06:/var/lib/clickhouse
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ depends_on:
+ - "clickhouse-zookeeper"
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.0.5
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - 9092:9092
+ - 9093
+ - 9997
+ - 29092
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ JMX_PORT: 9997
+ KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+ volumes:
+ - ./kafka-script.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+ networks:
+ ckh_net:
+ aliases:
+ - hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local
+
+
+ # Kafka UI for debugging kafka queues
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8090:8080
+ depends_on:
+ - kafka0
+ networks:
+ - ckh_net
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
+ KAFKA_CLUSTERS_0_JMXPORT: 9997
+
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh b/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh
new file mode 100755
index 000000000000..023c832b4e1b
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/kafka-script.sh
@@ -0,0 +1,11 @@
+# This script is required to run kafka cluster (without zookeeper)
+#!/bin/sh
+
+# Docker workaround: Remove check for KAFKA_ZOOKEEPER_CONNECT parameter
+sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure
+
+# Docker workaround: Ignore cub zk-ready
+sed -i 's/cub zk-ready/echo ignore zk-ready/' /etc/confluent/docker/ensure
+
+# KRaft required step: Format the storage directory with a new cluster ID
+echo "kafka-storage format --ignore-formatted -t $(kafka-storage random-uuid) -c /etc/kafka/kafka.properties" >> /etc/confluent/docker/ensure
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql
new file mode 100644
index 000000000000..ad0fe6d778fd
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/api_event_logs.sql
@@ -0,0 +1,242 @@
+CREATE TABLE hyperswitch.api_events_queue on cluster '{cluster}' (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ `dispute_id` Nullable(String)
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local:9092',
+kafka_topic_list = 'hyperswitch-api-log-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+CREATE TABLE hyperswitch.api_events_clustered on cluster '{cluster}' (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1,
+ INDEX apiIndex api_name TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/api_events_clustered',
+ '{replica}'
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, flow_type, status_code, api_name)
+TTL created_at + toIntervalMonth(6)
+;
+
+
+CREATE TABLE hyperswitch.api_events_dist on cluster '{cluster}' (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `inserted_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ `dispute_id` Nullable(String)
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'api_events_clustered', rand());
+
+CREATE MATERIALIZED VIEW hyperswitch.api_events_mv on cluster '{cluster}' TO hyperswitch.api_events_dist (
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `inserted_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ `dispute_id` Nullable(String)
+) AS
+SELECT
+ merchant_id,
+ payment_id,
+ refund_id,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ customer_id,
+ user_id,
+ request_id,
+ flow_type,
+ api_name,
+ request,
+ response,
+ status_code,
+ url_path,
+ event_type,
+ now() as inserted_at,
+ created_at,
+ latency,
+ user_agent,
+ ip_addr
+FROM
+ hyperswitch.api_events_queue
+WHERE length(_error) = 0;
+
+
+CREATE MATERIALIZED VIEW hyperswitch.api_events_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.api_events_queue
+WHERE length(_error) > 0
+;
+
+
+ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `url_path` LowCardinality(Nullable(String));
+ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `event_type` LowCardinality(Nullable(String));
+ALTER TABLE hyperswitch.api_events_clustered on cluster '{cluster}' ADD COLUMN `dispute_id` Nullable(String);
+
+CREATE TABLE hyperswitch.api_audit_log ON CLUSTER '{cluster}' (
+ `merchant_id` LowCardinality(String),
+ `payment_id` String,
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `customer_id` LowCardinality(Nullable(String))
+) ENGINE = ReplicatedMergeTree( '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/api_audit_log', '{replica}' ) PARTITION BY merchant_id
+ORDER BY (merchant_id, payment_id)
+TTL created_at + toIntervalMonth(18)
+SETTINGS index_granularity = 8192;
+
+
+CREATE MATERIALIZED VIEW hyperswitch.api_audit_log_mv ON CLUSTER `{cluster}` TO hyperswitch.api_audit_log(
+ `merchant_id` LowCardinality(String),
+ `payment_id` String,
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `request_id` Nullable(String),
+ `flow_type` LowCardinality(String),
+ `api_name` LowCardinality(String),
+ `request` String,
+ `response` String,
+ `status_code` UInt32,
+ `url_path` LowCardinality(Nullable(String)),
+ `event_type` LowCardinality(Nullable(String)),
+ `inserted_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `latency` Nullable(UInt128),
+ `user_agent` Nullable(String),
+ `ip_addr` Nullable(String),
+ `dispute_id` Nullable(String)
+) AS
+SELECT
+ merchant_id,
+ multiIf(payment_id IS NULL, '', payment_id) AS payment_id,
+ refund_id,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ customer_id,
+ user_id,
+ request_id,
+ flow_type,
+ api_name,
+ request,
+ response,
+ status_code,
+ url_path,
+ api_event_type AS event_type,
+ now() AS inserted_at,
+ created_at,
+ latency,
+ user_agent,
+ ip_addr,
+ dispute_id
+FROM hyperswitch.api_events_queue
+WHERE length(_error) = 0;
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql
new file mode 100644
index 000000000000..3a6281ae9050
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_attempts.sql
@@ -0,0 +1,217 @@
+CREATE TABLE hyperswitch.payment_attempt_queue on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` LowCardinality(Nullable(String)),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-attempt-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+
+CREATE TABLE hyperswitch.payment_attempt_dist on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'payment_attempt_clustered', cityHash64(attempt_id));
+
+
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_attempt_mv on cluster '{cluster}' TO hyperswitch.payment_attempt_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime64(3),
+ `capture_on` Nullable(DateTime64(3)),
+ `last_synced` Nullable(DateTime64(3)),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ attempt_id,
+ status,
+ amount,
+ currency,
+ connector,
+ save_to_locker,
+ error_message,
+ offer_amount,
+ surcharge_amount,
+ tax_amount,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ connector_transaction_id,
+ capture_method,
+ confirm,
+ authentication_type,
+ cancellation_reason,
+ amount_to_capture,
+ mandate_id,
+ browser_info,
+ error_code,
+ connector_metadata,
+ payment_experience,
+ created_at,
+ capture_on,
+ last_synced,
+ modified_at,
+ now() as inserted_at,
+ sign_flag
+FROM
+ hyperswitch.payment_attempt_queue
+WHERE length(_error) = 0;
+
+
+CREATE TABLE hyperswitch.payment_attempt_clustered on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
+ INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedCollapsingMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/payment_attempt_clustered',
+ '{replica}',
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, attempt_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_attempt_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.payment_attempt_queue
+WHERE length(_error) > 0
+;
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql
new file mode 100644
index 000000000000..eb2d83140e92
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/payment_intents.sql
@@ -0,0 +1,165 @@
+CREATE TABLE hyperswitch.payment_intents_queue on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` String,
+ `business_label` String,
+ `modified_at` DateTime,
+ `created_at` DateTime,
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-intent-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+CREATE TABLE hyperswitch.payment_intents_dist on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'payment_intents_clustered', cityHash64(payment_id));
+
+CREATE TABLE hyperswitch.payment_intents_clustered on cluster '{cluster}' (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedCollapsingMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/payment_intents_clustered',
+ '{replica}',
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, payment_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_intent_mv on cluster '{cluster}' TO hyperswitch.payment_intents_dist (
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `last_synced` Nullable(DateTime64(3)),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ status,
+ amount,
+ currency,
+ amount_captured,
+ customer_id,
+ description,
+ return_url,
+ connector_id,
+ statement_descriptor_name,
+ statement_descriptor_suffix,
+ setup_future_usage,
+ off_session,
+ client_secret,
+ active_attempt_id,
+ business_country,
+ business_label,
+ modified_at,
+ created_at,
+ last_synced,
+ now() as inserted_at,
+ sign_flag
+FROM hyperswitch.payment_intents_queue
+WHERE length(_error) = 0;
+
+CREATE MATERIALIZED VIEW hyperswitch.payment_intent_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.payment_intents_queue
+WHERE length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql
new file mode 100644
index 000000000000..bf5f6e0e2405
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/refund_analytics.sql
@@ -0,0 +1,173 @@
+CREATE TABLE hyperswitch.refund_queue on cluster '{cluster}' (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime,
+ `modified_at` DateTime,
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-refund-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream';
+
+CREATE TABLE hyperswitch.refund_dist on cluster '{cluster}' (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Distributed('{cluster}', 'hyperswitch', 'refund_clustered', cityHash64(refund_id));
+
+
+
+CREATE TABLE hyperswitch.refund_clustered on cluster '{cluster}' (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedCollapsingMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/refund_clustered',
+ '{replica}',
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, refund_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.kafka_parse_refund on cluster '{cluster}' TO hyperswitch.refund_dist (
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime64(3),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ internal_reference_id,
+ refund_id,
+ payment_id,
+ merchant_id,
+ connector_transaction_id,
+ connector,
+ connector_refund_id,
+ external_reference_id,
+ refund_type,
+ total_amount,
+ currency,
+ refund_amount,
+ refund_status,
+ sent_to_gateway,
+ refund_error_message,
+ refund_arn,
+ attempt_id,
+ description,
+ refund_reason,
+ refund_error_code,
+ created_at,
+ modified_at,
+ now() as inserted_at,
+ sign_flag
+FROM hyperswitch.refund_queue
+WHERE length(_error) = 0;
+
+CREATE MATERIALIZED VIEW hyperswitch.refund_parse_errors on cluster '{cluster}'
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM hyperswitch.refund_queue
+WHERE length(_error) > 0
+;
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql
new file mode 100644
index 000000000000..37766392bc70
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/sdk_events.sql
@@ -0,0 +1,156 @@
+CREATE TABLE hyperswitch.sdk_events_queue on cluster '{cluster}' (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` LowCardinality(Nullable(String)),
+ `latency` Nullable(UInt32),
+ `timestamp` String,
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String))
+) ENGINE = Kafka SETTINGS
+ kafka_broker_list = 'hyper-c1-kafka-brokers.kafka-cluster.svc.cluster.local:9092',
+ kafka_topic_list = 'hyper-sdk-logs',
+ kafka_group_name = 'hyper-c1',
+ kafka_format = 'JSONEachRow',
+ kafka_handle_error_mode = 'stream';
+
+CREATE TABLE hyperswitch.sdk_events_clustered on cluster '{cluster}' (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` Bool DEFAULT 1,
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String)) DEFAULT '',
+ `created_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `inserted_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `latency` Nullable(UInt32) DEFAULT 0,
+ INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
+ INDEX eventIndex event_name TYPE bloom_filter GRANULARITY 1,
+ INDEX platformIndex platform TYPE bloom_filter GRANULARITY 1,
+ INDEX logTypeIndex log_type TYPE bloom_filter GRANULARITY 1,
+ INDEX categoryIndex category TYPE bloom_filter GRANULARITY 1,
+ INDEX sourceIndex source TYPE bloom_filter GRANULARITY 1,
+ INDEX componentIndex component TYPE bloom_filter GRANULARITY 1,
+ INDEX firstEventIndex first_event TYPE bloom_filter GRANULARITY 1
+) ENGINE = ReplicatedMergeTree(
+ '/clickhouse/{installation}/{cluster}/tables/{shard}/hyperswitch/sdk_events_clustered', '{replica}'
+)
+PARTITION BY
+ toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id)
+TTL
+ toDateTime(created_at) + toIntervalMonth(6)
+SETTINGS
+ index_granularity = 8192
+;
+
+CREATE TABLE hyperswitch.sdk_events_dist on cluster '{cluster}' (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` Bool DEFAULT 1,
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String)) DEFAULT '',
+ `created_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `inserted_at` DateTime64(3) DEFAULT now64() CODEC(T64, LZ4),
+ `latency` Nullable(UInt32) DEFAULT 0
+) ENGINE = Distributed(
+ '{cluster}', 'hyperswitch', 'sdk_events_clustered', rand()
+);
+
+CREATE MATERIALIZED VIEW hyperswitch.sdk_events_mv on cluster '{cluster}' TO hyperswitch.sdk_events_dist (
+ `payment_id` Nullable(String),
+ `merchant_id` String,
+ `remote_ip` Nullable(String),
+ `log_type` LowCardinality(Nullable(String)),
+ `event_name` LowCardinality(Nullable(String)),
+ `first_event` Bool,
+ `latency` Nullable(UInt32),
+ `browser_name` LowCardinality(Nullable(String)),
+ `browser_version` Nullable(String),
+ `platform` LowCardinality(Nullable(String)),
+ `source` LowCardinality(Nullable(String)),
+ `category` LowCardinality(Nullable(String)),
+ `version` LowCardinality(Nullable(String)),
+ `value` Nullable(String),
+ `component` LowCardinality(Nullable(String)),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_experience` LowCardinality(Nullable(String)),
+ `created_at` DateTime64(3)
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ remote_ip,
+ log_type,
+ event_name,
+ multiIf(first_event = 'true', 1, 0) AS first_event,
+ latency,
+ browser_name,
+ browser_version,
+ platform,
+ source,
+ category,
+ version,
+ value,
+ component,
+ payment_method,
+ payment_experience,
+ toDateTime64(timestamp, 3) AS created_at
+FROM
+ hyperswitch.sdk_events_queue
+WHERE length(_error) = 0
+;
+
+CREATE MATERIALIZED VIEW hyperswitch.sdk_parse_errors on cluster '{cluster}' (
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+) ENGINE = MergeTree
+ ORDER BY (topic, partition, offset)
+SETTINGS
+ index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw,
+ _error AS error
+FROM
+ hyperswitch.sdk_events_queue
+WHERE
+ length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql b/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql
new file mode 100644
index 000000000000..202b94ac6040
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/cluster_setup/scripts/seed_scripts.sql
@@ -0,0 +1 @@
+create database hyperswitch on cluster '{cluster}'; -- bootstrap: creates the analytics database on every node of the cluster
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/scripts/api_events.sql b/crates/analytics/docs/clickhouse/scripts/api_events.sql
new file mode 100644
index 000000000000..49a6472eaa4d
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/api_events.sql
@@ -0,0 +1,152 @@
+CREATE TABLE api_events_queue ( -- Kafka-engine landing table; not queried directly, consumed by the materialized views below
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `connector` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_flow` LowCardinality(String),
+ `api_auth_type` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `error` Nullable(String),
+ `authentication_data` Nullable(String),
+ `status_code` UInt32,
+ `created_at_timestamp` DateTime64(3),
+ `latency` UInt128,
+ `user_agent` String,
+ `ip_addr` String,
+ `hs_latency` Nullable(UInt128),
+ `http_method` LowCardinality(String),
+ `url_path` String,
+ `dispute_id` Nullable(String)
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-api-log-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream'; -- malformed messages surface via the _error virtual column instead of halting consumption
+
+
+CREATE TABLE api_events_dist ( -- query-facing MergeTree table for API request/response events
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `connector` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_flow` LowCardinality(String),
+ `api_auth_type` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `error` Nullable(String),
+ `authentication_data` Nullable(String),
+ `status_code` UInt32,
+ `created_at_timestamp` DateTime64(3),
+ `latency` UInt128,
+ `user_agent` String,
+ `ip_addr` String,
+ `hs_latency` Nullable(UInt128),
+ `http_method` LowCardinality(String),
+ `url_path` String,
+ `dispute_id` Nullable(String), -- comma was missing before the INDEX clauses (syntax error)
+ INDEX flowIndex flow_type TYPE bloom_filter GRANULARITY 1,
+ INDEX apiIndex api_flow TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
+) ENGINE = MergeTree
+PARTITION BY toStartOfDay(created_at_timestamp) -- this table has no `created_at` column; use the actual timestamp column
+ORDER BY
+ (created_at_timestamp, merchant_id, flow_type, status_code, api_flow)
+TTL created_at_timestamp + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW api_events_mv TO api_events_dist ( -- streams parsed API events from the Kafka queue into api_events_dist
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `payment_method` Nullable(String),
+ `payment_method_type` Nullable(String),
+ `customer_id` Nullable(String),
+ `user_id` Nullable(String),
+ `connector` Nullable(String),
+ `request_id` String,
+ `flow_type` LowCardinality(String),
+ `api_flow` LowCardinality(String),
+ `api_auth_type` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `error` Nullable(String),
+ `authentication_data` Nullable(String),
+ `status_code` UInt32,
+ `created_at_timestamp` DateTime64(3),
+ `latency` UInt128,
+ `user_agent` String,
+ `ip_addr` String,
+ `hs_latency` Nullable(UInt128),
+ `http_method` LowCardinality(String),
+ `url_path` String,
+ `dispute_id` Nullable(String)
+) AS
+SELECT
+ merchant_id,
+ payment_id,
+ refund_id,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ customer_id,
+ user_id,
+ connector,
+ request_id,
+ flow_type,
+ api_flow,
+ api_auth_type,
+ request,
+ response,
+ error,
+ authentication_data,
+ status_code,
+ created_at_timestamp,
+ -- dropped `now() AS inserted_at`: neither this view's column list nor api_events_dist declares inserted_at, so inserting it would fail
+ latency,
+ user_agent,
+ ip_addr,
+ hs_latency,
+ http_method,
+ url_path,
+ dispute_id
+FROM
+ api_events_queue
+where length(_error) = 0;
+
+
+CREATE MATERIALIZED VIEW api_events_parse_errors -- dead-letter store for api_events_queue messages that failed JSON parsing
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw, -- original unparsed message body, preserved for replay/debugging
+ _error AS error
+FROM api_events_queue
+WHERE length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/scripts/connector_events.sql b/crates/analytics/docs/clickhouse/scripts/connector_events.sql
new file mode 100644
index 000000000000..4a53f9edb0bf
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/connector_events.sql
@@ -0,0 +1,105 @@
+CREATE TABLE connector_events_queue ( -- Kafka-engine landing table for outbound connector API call events
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `connector_name` LowCardinality(String),
+ `request_id` String,
+ `flow` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `error` Nullable(String),
+ `status_code` UInt32,
+ `created_at` DateTime64(3),
+ `latency` UInt128,
+ `method` LowCardinality(String),
+ `refund_id` Nullable(String),
+ `dispute_id` Nullable(String)
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-connector-api-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream'; -- malformed messages surface via the _error virtual column instead of halting consumption
+
+
+CREATE TABLE connector_events_dist ( -- query-facing MergeTree table for outbound connector API events
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `connector_name` LowCardinality(String),
+ `request_id` String,
+ `flow` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `error` Nullable(String),
+ `status_code` UInt32,
+ `created_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `latency` UInt128,
+ `method` LowCardinality(String),
+ `refund_id` Nullable(String),
+ `dispute_id` Nullable(String),
+ INDEX flowIndex flow TYPE bloom_filter GRANULARITY 1, -- was `flowTYPE`: column name and TYPE keyword were fused (syntax error)
+ INDEX connectorIndex connector_name TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status_code TYPE bloom_filter GRANULARITY 1
+) ENGINE = MergeTree
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, flow, status_code) -- this table has `flow`; `flow_type`/`api_flow` were copied from api_events and do not exist here
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW connector_events_mv TO connector_events_dist ( -- streams parsed connector events from the Kafka queue into connector_events_dist
+ `merchant_id` String,
+ `payment_id` Nullable(String),
+ `connector_name` LowCardinality(String),
+ `request_id` String,
+ `flow` LowCardinality(String),
+ `request` String,
+ `response` Nullable(String),
+ `error` Nullable(String),
+ `status_code` UInt32,
+ `created_at` DateTime64(3),
+ `latency` UInt128,
+ `method` LowCardinality(String),
+ `refund_id` Nullable(String),
+ `dispute_id` Nullable(String)
+) AS
+SELECT
+ merchant_id,
+ payment_id,
+ connector_name,
+ request_id,
+ flow,
+ request,
+ response,
+ error,
+ status_code,
+ created_at,
+ now() as inserted_at, -- server-side ingestion timestamp; target table declares inserted_at
+ latency,
+ method,
+ refund_id,
+ dispute_id
+FROM
+ connector_events_queue
+where length(_error) = 0; -- keep only rows the Kafka JSON parser accepted
+
+
+CREATE MATERIALIZED VIEW connector_events_parse_errors -- dead-letter store for connector_events_queue messages that failed JSON parsing
+(
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+)
+ENGINE = MergeTree
+ORDER BY (topic, partition, offset)
+SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS offset,
+ _raw_message AS raw, -- original unparsed message body, preserved for replay/debugging
+ _error AS error
+FROM connector_events_queue
+WHERE length(_error) > 0
+;
diff --git a/crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql b/crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql
new file mode 100644
index 000000000000..3dc907629d0a
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/outgoing_webhook_events.sql
@@ -0,0 +1,109 @@
+CREATE TABLE
+ outgoing_webhook_events_queue ( -- Kafka-engine landing table for outgoing webhook delivery events
+ `merchant_id` String,
+ `event_id` Nullable(String),
+ `event_type` LowCardinality(String),
+ `outgoing_webhook_event_type` LowCardinality(String),
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `attempt_id` Nullable(String),
+ `dispute_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `mandate_id` Nullable(String),
+ `content` Nullable(String),
+ `is_error` Bool,
+ `error` Nullable(String),
+ `created_at_timestamp` DateTime64(3)
+ ) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+ kafka_topic_list = 'hyperswitch-outgoing-webhook-events',
+ kafka_group_name = 'hyper-c1',
+ kafka_format = 'JSONEachRow',
+ kafka_handle_error_mode = 'stream'; -- malformed messages surface via the _error virtual column instead of halting consumption
+
+CREATE TABLE
+ outgoing_webhook_events_cluster ( -- query-facing MergeTree table for outgoing webhook events
+ `merchant_id` String,
+ `event_id` String,
+ `event_type` LowCardinality(String),
+ `outgoing_webhook_event_type` LowCardinality(String),
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `attempt_id` Nullable(String),
+ `dispute_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `mandate_id` Nullable(String),
+ `content` Nullable(String),
+ `is_error` Bool,
+ `error` Nullable(String),
+ `created_at_timestamp` DateTime64(3),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4), -- ingestion time; also drives the TTL below
+ INDEX eventIndex event_type TYPE bloom_filter GRANULARITY 1,
+ INDEX webhookeventIndex outgoing_webhook_event_type TYPE bloom_filter GRANULARITY 1
+ ) ENGINE = MergeTree PARTITION BY toStartOfDay(created_at_timestamp)
+ORDER BY (
+ created_at_timestamp,
+ merchant_id,
+ event_id,
+ event_type,
+ outgoing_webhook_event_type
+ ) TTL inserted_at + toIntervalMonth(6); -- rows expire six months after ingestion
+
+CREATE MATERIALIZED VIEW outgoing_webhook_events_mv TO outgoing_webhook_events_cluster ( -- streams parsed webhook events from the Kafka queue into the MergeTree table
+ `merchant_id` String,
+ `event_id` Nullable(String),
+ `event_type` LowCardinality(String),
+ `outgoing_webhook_event_type` LowCardinality(String),
+ `payment_id` Nullable(String),
+ `refund_id` Nullable(String),
+ `attempt_id` Nullable(String),
+ `dispute_id` Nullable(String),
+ `payment_method_id` Nullable(String),
+ `mandate_id` Nullable(String),
+ `content` Nullable(String),
+ `is_error` Bool,
+ `error` Nullable(String),
+ `created_at_timestamp` DateTime64(3),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4) -- trailing comma removed: a comma before the closing parenthesis is a syntax error
+) AS
+SELECT
+ merchant_id,
+ event_id,
+ event_type,
+ outgoing_webhook_event_type,
+ payment_id,
+ refund_id,
+ attempt_id,
+ dispute_id,
+ payment_method_id,
+ mandate_id,
+ content,
+ is_error,
+ error,
+ created_at_timestamp,
+ now() AS inserted_at
+FROM
+ outgoing_webhook_events_queue
+where length(_error) = 0;
+
+CREATE MATERIALIZED VIEW outgoing_webhook_parse_errors ( -- dead-letter store for webhook queue messages that failed JSON parsing
+ `topic` String,
+ `partition` Int64,
+ `offset` Int64,
+ `raw` String,
+ `error` String
+) ENGINE = MergeTree
+ORDER BY (
+ topic, partition,
+ offset
+ ) SETTINGS index_granularity = 8192 AS
+SELECT
+ _topic AS topic,
+ _partition AS partition,
+ _offset AS
+offset
+, -- NOTE(review): alias split across lines by the formatter; parses as `_offset AS offset,`
+ _raw_message AS raw,
+ _error AS error
+FROM
+ outgoing_webhook_events_queue
+WHERE length(_error) > 0;
\ No newline at end of file
diff --git a/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql b/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql
new file mode 100644
index 000000000000..276e311e57a9
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/payment_attempts.sql
@@ -0,0 +1,156 @@
+CREATE TABLE payment_attempts_queue ( -- Kafka-engine landing table for payment-attempt change events
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` LowCardinality(Nullable(String)),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-attempt-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream'; -- malformed messages surface via the _error virtual column instead of halting consumption
+
+CREATE TABLE payment_attempt_dist ( -- query-facing table; CollapsingMergeTree cancels old row versions via sign_flag (+1 insert / -1 delete)
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `capture_on` Nullable(DateTime) CODEC(T64, LZ4),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX paymentMethodIndex payment_method TYPE bloom_filter GRANULARITY 1,
+ INDEX authenticationTypeIndex authentication_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = CollapsingMergeTree(
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, attempt_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+
+CREATE MATERIALIZED VIEW kafka_parse_pa TO payment_attempt_dist ( -- streams payment-attempt events from the Kafka queue into payment_attempt_dist
+ `payment_id` String,
+ `merchant_id` String,
+ `attempt_id` String,
+ `status` LowCardinality(String),
+ `amount` Nullable(UInt32),
+ `currency` LowCardinality(Nullable(String)),
+ `connector` LowCardinality(Nullable(String)),
+ `save_to_locker` Nullable(Bool),
+ `error_message` Nullable(String),
+ `offer_amount` Nullable(UInt32),
+ `surcharge_amount` Nullable(UInt32),
+ `tax_amount` Nullable(UInt32),
+ `payment_method_id` Nullable(String),
+ `payment_method` LowCardinality(Nullable(String)),
+ `payment_method_type` LowCardinality(Nullable(String)),
+ `connector_transaction_id` Nullable(String),
+ `capture_method` Nullable(String),
+ `confirm` Bool,
+ `authentication_type` LowCardinality(Nullable(String)),
+ `cancellation_reason` Nullable(String),
+ `amount_to_capture` Nullable(UInt32),
+ `mandate_id` Nullable(String),
+ `browser_info` Nullable(String),
+ `error_code` Nullable(String),
+ `connector_metadata` Nullable(String),
+ `payment_experience` Nullable(String),
+ `created_at` DateTime64(3),
+ `capture_on` Nullable(DateTime64(3)),
+ `last_synced` Nullable(DateTime64(3)),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ attempt_id,
+ status,
+ amount,
+ currency,
+ connector,
+ save_to_locker,
+ error_message,
+ offer_amount,
+ surcharge_amount,
+ tax_amount,
+ payment_method_id,
+ payment_method,
+ payment_method_type,
+ connector_transaction_id,
+ capture_method,
+ confirm,
+ authentication_type,
+ cancellation_reason,
+ amount_to_capture,
+ mandate_id,
+ browser_info,
+ error_code,
+ connector_metadata,
+ payment_experience,
+ created_at,
+ capture_on,
+ last_synced,
+ modified_at,
+ now() as inserted_at, -- server-side ingestion timestamp
+ sign_flag
+FROM
+ payment_attempts_queue;
+
diff --git a/crates/analytics/docs/clickhouse/scripts/payment_intents.sql b/crates/analytics/docs/clickhouse/scripts/payment_intents.sql
new file mode 100644
index 000000000000..8cd487f364b4
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/payment_intents.sql
@@ -0,0 +1,116 @@
+CREATE TABLE payment_intents_queue ( -- Kafka-engine landing table for payment-intent change events
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` String,
+ `business_label` String,
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-payment-intent-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream'; -- malformed messages surface via the _error virtual column instead of halting consumption
+
+
+CREATE TABLE payment_intents_dist ( -- query-facing table; CollapsingMergeTree cancels old row versions via sign_flag (+1 insert / -1 delete)
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `last_synced` Nullable(DateTime) CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector_id TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex status TYPE bloom_filter GRANULARITY 1
+) ENGINE = CollapsingMergeTree(
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, payment_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW kafka_parse_payment_intent TO payment_intents_dist ( -- streams payment-intent events from the Kafka queue into payment_intents_dist
+ `payment_id` String,
+ `merchant_id` String,
+ `status` LowCardinality(String),
+ `amount` UInt32,
+ `currency` LowCardinality(Nullable(String)),
+ `amount_captured` Nullable(UInt32),
+ `customer_id` Nullable(String),
+ `description` Nullable(String),
+ `return_url` Nullable(String),
+ `connector_id` LowCardinality(Nullable(String)),
+ `statement_descriptor_name` Nullable(String),
+ `statement_descriptor_suffix` Nullable(String),
+ `setup_future_usage` LowCardinality(Nullable(String)),
+ `off_session` Nullable(Bool),
+ `client_secret` Nullable(String),
+ `active_attempt_id` String,
+ `business_country` LowCardinality(String),
+ `business_label` String,
+ `modified_at` DateTime64(3),
+ `created_at` DateTime64(3),
+ `last_synced` Nullable(DateTime64(3)),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ payment_id,
+ merchant_id,
+ status,
+ amount,
+ currency,
+ amount_captured,
+ customer_id,
+ description,
+ return_url,
+ connector_id,
+ statement_descriptor_name,
+ statement_descriptor_suffix,
+ setup_future_usage,
+ off_session,
+ client_secret,
+ active_attempt_id,
+ business_country,
+ business_label,
+ modified_at,
+ created_at,
+ last_synced,
+ now() as inserted_at, -- server-side ingestion timestamp
+ sign_flag
+FROM payment_intents_queue;
diff --git a/crates/analytics/docs/clickhouse/scripts/refunds.sql b/crates/analytics/docs/clickhouse/scripts/refunds.sql
new file mode 100644
index 000000000000..a131270c1326
--- /dev/null
+++ b/crates/analytics/docs/clickhouse/scripts/refunds.sql
@@ -0,0 +1,121 @@
+CREATE TABLE refund_queue ( -- Kafka-engine landing table for refund change events
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime CODEC(T64, LZ4),
+ `modified_at` DateTime CODEC(T64, LZ4),
+ `sign_flag` Int8
+) ENGINE = Kafka SETTINGS kafka_broker_list = 'kafka0:29092',
+kafka_topic_list = 'hyperswitch-refund-events',
+kafka_group_name = 'hyper-c1',
+kafka_format = 'JSONEachRow',
+kafka_handle_error_mode = 'stream'; -- malformed messages surface via the _error virtual column instead of halting consumption
+
+
+CREATE TABLE refund_dist ( -- query-facing table; CollapsingMergeTree cancels old row versions via sign_flag (+1 insert / -1 delete)
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `modified_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `inserted_at` DateTime DEFAULT now() CODEC(T64, LZ4),
+ `sign_flag` Int8,
+ INDEX connectorIndex connector TYPE bloom_filter GRANULARITY 1,
+ INDEX refundTypeIndex refund_type TYPE bloom_filter GRANULARITY 1,
+ INDEX currencyIndex currency TYPE bloom_filter GRANULARITY 1,
+ INDEX statusIndex refund_status TYPE bloom_filter GRANULARITY 1
+) ENGINE = CollapsingMergeTree(
+ sign_flag
+)
+PARTITION BY toStartOfDay(created_at)
+ORDER BY
+ (created_at, merchant_id, refund_id)
+TTL created_at + toIntervalMonth(6)
+;
+
+CREATE MATERIALIZED VIEW kafka_parse_refund TO refund_dist ( -- streams refund events from the Kafka queue into refund_dist
+ `internal_reference_id` String,
+ `refund_id` String,
+ `payment_id` String,
+ `merchant_id` String,
+ `connector_transaction_id` String,
+ `connector` LowCardinality(Nullable(String)),
+ `connector_refund_id` Nullable(String),
+ `external_reference_id` Nullable(String),
+ `refund_type` LowCardinality(String),
+ `total_amount` Nullable(UInt32),
+ `currency` LowCardinality(String),
+ `refund_amount` Nullable(UInt32),
+ `refund_status` LowCardinality(String),
+ `sent_to_gateway` Bool,
+ `refund_error_message` Nullable(String),
+ `refund_arn` Nullable(String),
+ `attempt_id` String,
+ `description` Nullable(String),
+ `refund_reason` Nullable(String),
+ `refund_error_code` Nullable(String),
+ `created_at` DateTime64(3),
+ `modified_at` DateTime64(3),
+ `inserted_at` DateTime64(3),
+ `sign_flag` Int8
+) AS
+SELECT
+ internal_reference_id,
+ refund_id,
+ payment_id,
+ merchant_id,
+ connector_transaction_id,
+ connector,
+ connector_refund_id,
+ external_reference_id,
+ refund_type,
+ total_amount,
+ currency,
+ refund_amount,
+ refund_status,
+ sent_to_gateway,
+ refund_error_message,
+ refund_arn,
+ attempt_id,
+ description,
+ refund_reason,
+ refund_error_code,
+ created_at,
+ modified_at,
+ now() as inserted_at, -- server-side ingestion timestamp
+ sign_flag
+FROM refund_queue;
diff --git a/crates/analytics/src/api_event.rs b/crates/analytics/src/api_event.rs
new file mode 100644
index 000000000000..113344d47254
--- /dev/null
+++ b/crates/analytics/src/api_event.rs
@@ -0,0 +1,9 @@
+mod core; // private request handlers; public entry points are re-exported below
+pub mod events;
+pub mod filters;
+pub mod metrics;
+pub mod types;
+
+pub trait APIEventAnalytics: events::ApiLogsFilterAnalytics {} // marker trait: a data source that can serve API-log queries
+
+pub use self::core::{api_events_core, get_api_event_metrics, get_filters};
diff --git a/crates/analytics/src/api_event/core.rs b/crates/analytics/src/api_event/core.rs
new file mode 100644
index 000000000000..81b82c5dce15
--- /dev/null
+++ b/crates/analytics/src/api_event/core.rs
@@ -0,0 +1,181 @@
+use std::collections::HashMap;
+
+use api_models::analytics::{
+ api_event::{
+ ApiEventMetricsBucketIdentifier, ApiEventMetricsBucketValue, ApiLogsRequest,
+ ApiMetricsBucketResponse,
+ },
+ AnalyticsMetadata, ApiEventFiltersResponse, GetApiEventFiltersRequest,
+ GetApiEventMetricRequest, MetricsResponse,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::{IntoReport, ResultExt};
+use router_env::{
+ instrument, logger,
+ tracing::{self, Instrument},
+};
+
+use super::{
+ events::{get_api_event, ApiLogsResult},
+ metrics::ApiEventMetricRow,
+};
+use crate::{
+ errors::{AnalyticsError, AnalyticsResult},
+ metrics,
+ types::FiltersError,
+ AnalyticsProvider,
+};
+
+#[instrument(skip_all)]
+pub async fn api_events_core( // fetches raw API log rows for one merchant
+ pool: &AnalyticsProvider,
+ req: ApiLogsRequest,
+ merchant_id: String,
+) -> AnalyticsResult<Vec<ApiLogsResult>> { // generic args restored; the diff had them stripped to `AnalyticsResult>`
+ let data = match pool {
+ AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented( // API events live only in ClickHouse
+ "API Events not implemented for SQLX",
+ ))
+ .into_report()
+ .attach_printable("SQL Analytics is not implemented for API Events"),
+ AnalyticsProvider::Clickhouse(pool) => get_api_event(&merchant_id, req, pool).await,
+ AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
+ | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => { // combined providers always read API events from the ClickHouse side
+ get_api_event(&merchant_id, req, ckh_pool).await
+ }
+ }
+ .switch()?;
+ Ok(data)
+}
+
+pub async fn get_filters( // lists the distinct values available for each requested API-event dimension
+ pool: &AnalyticsProvider,
+ req: GetApiEventFiltersRequest,
+ merchant_id: String,
+) -> AnalyticsResult<ApiEventFiltersResponse> { // generic arg restored; the diff had it stripped to `AnalyticsResult`
+ use api_models::analytics::{api_event::ApiEventDimensions, ApiEventFilterValue};
+
+ use super::filters::get_api_event_filter_for_dimension;
+ use crate::api_event::filters::ApiEventFilter;
+
+ let mut res = ApiEventFiltersResponse::default();
+ for dim in req.group_by_names {
+ let values = match pool {
+ AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented(
+ "API Events not implemented for SQLX",
+ ))
+ .into_report()
+ .attach_printable("SQL Analytics is not implemented for API Events"),
+ AnalyticsProvider::Clickhouse(ckh_pool)
+ | AnalyticsProvider::CombinedSqlx(_, ckh_pool)
+ | AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
+ get_api_event_filter_for_dimension(dim, &merchant_id, &req.time_range, ckh_pool)
+ .await
+ }
+ }
+ .switch()?
+ .into_iter()
+ .filter_map(|fil: ApiEventFilter| match dim { // each row carries only the column for the queried dimension
+ ApiEventDimensions::StatusCode => fil.status_code.map(|i| i.to_string()),
+ ApiEventDimensions::FlowType => fil.flow_type,
+ ApiEventDimensions::ApiFlow => fil.api_flow,
+ })
+ .collect::<Vec<String>>(); // turbofish restored; the diff had `.collect::>()`
+ res.query_data.push(ApiEventFilterValue {
+ dimension: dim,
+ values,
+ })
+ }
+
+ Ok(res)
+}
+
+#[instrument(skip_all)]
+pub async fn get_api_event_metrics( // runs every requested metric concurrently and merges the per-bucket results
+ pool: &AnalyticsProvider,
+ merchant_id: &str,
+ req: GetApiEventMetricRequest,
+) -> AnalyticsResult<MetricsResponse<ApiMetricsBucketResponse>> { // generics restored; the diff had `AnalyticsResult>`
+ let mut metrics_accumulator: HashMap<ApiEventMetricsBucketIdentifier, ApiEventMetricRow> =
+ HashMap::new();
+
+ let mut set = tokio::task::JoinSet::new();
+ for metric_type in req.metrics.iter().cloned() {
+ let req = req.clone();
+ let pool = pool.clone();
+ let task_span = tracing::debug_span!(
+ "analytics_api_metrics_query",
+ api_event_metric = metric_type.as_ref()
+ );
+
+ // TODO: lifetime issues with joinset,
+ // can be optimized away if joinset lifetime requirements are relaxed
+ let merchant_id_scoped = merchant_id.to_owned();
+ set.spawn(
+ async move {
+ let data = pool
+ .get_api_event_metrics(
+ &metric_type,
+ &req.group_by_names.clone(),
+ &merchant_id_scoped,
+ &req.filters,
+ &req.time_series.map(|t| t.granularity),
+ &req.time_range,
+ )
+ .await
+ .change_context(AnalyticsError::UnknownError);
+ (metric_type, data) // pair result with its metric so buckets can be merged below
+ }
+ .instrument(task_span),
+ );
+ }
+
+ while let Some((metric, data)) = set
+ .join_next()
+ .await
+ .transpose()
+ .into_report()
+ .change_context(AnalyticsError::UnknownError)?
+ {
+ let data = data?;
+ let attributes = &[
+ metrics::request::add_attributes("metric_type", metric.to_string()),
+ metrics::request::add_attributes("source", pool.to_string()),
+ ];
+
+ let value = u64::try_from(data.len());
+ if let Ok(val) = value { // record telemetry only when the length fits in u64
+ metrics::BUCKETS_FETCHED.record(&metrics::CONTEXT, val, attributes);
+ logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
+ }
+ for (id, value) in data {
+ metrics_accumulator
+ .entry(id)
+ .and_modify(|data| { // `or` keeps an already-populated field from a previous metric's row
+ data.api_count = data.api_count.or(value.api_count);
+ data.status_code_count = data.status_code_count.or(value.status_code_count);
+ data.latency = data.latency.or(value.latency);
+ })
+ .or_insert(value);
+ }
+ }
+
+ let query_data: Vec<ApiMetricsBucketResponse> = metrics_accumulator // generic restored; the diff had `Vec =`
+ .into_iter()
+ .map(|(id, val)| ApiMetricsBucketResponse {
+ values: ApiEventMetricsBucketValue {
+ latency: val.latency,
+ api_count: val.api_count,
+ status_code_count: val.status_code_count,
+ },
+ dimensions: id,
+ })
+ .collect();
+
+ Ok(MetricsResponse {
+ query_data,
+ meta_data: [AnalyticsMetadata {
+ current_time_range: req.time_range,
+ }],
+ })
+}
diff --git a/crates/analytics/src/api_event/events.rs b/crates/analytics/src/api_event/events.rs
new file mode 100644
index 000000000000..eb9b2d561c53
--- /dev/null
+++ b/crates/analytics/src/api_event/events.rs
@@ -0,0 +1,107 @@
+use api_models::analytics::{
+ api_event::{ApiLogsRequest, QueryType},
+ Granularity,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use router_env::Flow;
+use time::PrimitiveDateTime;
+
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
+};
+pub trait ApiLogsFilterAnalytics: LoadRow {}
+
+pub async fn get_api_event( // builds and runs the ClickHouse query for a merchant's API log rows
+ merchant_id: &String,
+ query_param: ApiLogsRequest,
+ pool: &T,
+) -> FiltersResult<Vec<ApiLogsResult>> // generics restored; the diff had `FiltersResult>`
+where
+ T: AnalyticsDataSource + ApiLogsFilterAnalytics,
+ PrimitiveDateTime: ToSql<T>,
+ AnalyticsCollection: ToSql<T>,
+ Granularity: GroupByClause<T>,
+ Aggregate<&'static str>: ToSql<T>,
+ Window<&'static str>: ToSql<T>,
+{
+ let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+ query_builder.add_select_column("*").switch()?;
+
+ query_builder
+ .add_filter_clause("merchant_id", merchant_id)
+ .switch()?;
+ match query_param.query_param {
+ QueryType::Payment { payment_id } => query_builder
+ .add_filter_clause("payment_id", payment_id)
+ .switch()?,
+ QueryType::Refund {
+ payment_id,
+ refund_id,
+ } => { // refund lookups are scoped by both the payment and the refund
+ query_builder
+ .add_filter_clause("payment_id", payment_id)
+ .switch()?;
+ query_builder
+ .add_filter_clause("refund_id", refund_id)
+ .switch()?;
+ }
+ }
+ if let Some(list_api_name) = query_param.api_name_filter {
+ query_builder
+ .add_filter_in_range_clause("api_flow", &list_api_name)
+ .switch()?;
+ } else { // no explicit filter: default to the payment/refund/webhook flows
+ query_builder
+ .add_filter_in_range_clause(
+ "api_flow",
+ &[
+ Flow::PaymentsCancel,
+ Flow::PaymentsCapture,
+ Flow::PaymentsConfirm,
+ Flow::PaymentsCreate,
+ Flow::PaymentsStart,
+ Flow::PaymentsUpdate,
+ Flow::RefundsCreate,
+ Flow::IncomingWebhookReceive,
+ ],
+ )
+ .switch()?;
+ }
+ //TODO!: update the execute_query function to return reports instead of plain errors...
+ query_builder
+ .execute_query::<ApiLogsResult, _>(pool) // turbofish restored; the diff had `.execute_query::(pool)`
+ .await
+ .change_context(FiltersError::QueryBuildingError)?
+ .change_context(FiltersError::QueryExecutionFailure)
+}
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+pub struct ApiLogsResult { // one row of the ClickHouse api_events table; Option<_> params restored (the diff had bare `Option,`)
+ pub merchant_id: String,
+ pub payment_id: Option<String>,
+ pub refund_id: Option<String>,
+ pub payment_method_id: Option<String>,
+ pub payment_method: Option<String>,
+ pub payment_method_type: Option<String>,
+ pub customer_id: Option<String>,
+ pub user_id: Option<String>,
+ pub connector: Option<String>,
+ pub request_id: Option<String>,
+ pub flow_type: String,
+ pub api_flow: String,
+ pub api_auth_type: Option<String>,
+ pub request: String,
+ pub response: Option<String>,
+ pub error: Option<String>,
+ pub authentication_data: Option<String>,
+ pub status_code: u16,
+ pub latency: Option<u128>, // UInt128 in the ClickHouse schema
+ pub user_agent: Option<String>,
+ pub hs_latency: Option<u128>, // Nullable(UInt128) in the ClickHouse schema
+ pub ip_addr: Option<String>,
+ #[serde(with = "common_utils::custom_serde::iso8601")]
+ pub created_at: PrimitiveDateTime,
+ pub http_method: Option<String>,
+ pub url_path: Option<String>,
+}
diff --git a/crates/analytics/src/api_event/filters.rs b/crates/analytics/src/api_event/filters.rs
new file mode 100644
index 000000000000..87414ebad4ba
--- /dev/null
+++ b/crates/analytics/src/api_event/filters.rs
@@ -0,0 +1,53 @@
+use api_models::analytics::{api_event::ApiEventDimensions, Granularity, TimeRange};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
+};
+
+pub trait ApiEventFilterAnalytics: LoadRow {}
+
+pub async fn get_api_event_filter_for_dimension(
+ dimension: ApiEventDimensions,
+ merchant_id: &String,
+ time_range: &TimeRange,
+ pool: &T,
+) -> FiltersResult>
+where
+ T: AnalyticsDataSource + ApiEventFilterAnalytics,
+ PrimitiveDateTime: ToSql,
+ AnalyticsCollection: ToSql,
+ Granularity: GroupByClause,
+ Aggregate<&'static str>: ToSql,
+ Window<&'static str>: ToSql,
+{
+ let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+ query_builder.add_select_column(dimension).switch()?;
+ time_range
+ .set_filter_clause(&mut query_builder)
+ .attach_printable("Error filtering time range")
+ .switch()?;
+
+ query_builder
+ .add_filter_clause("merchant_id", merchant_id)
+ .switch()?;
+
+ query_builder.set_distinct();
+
+ query_builder
+ .execute_query::(pool)
+ .await
+ .change_context(FiltersError::QueryBuildingError)?
+ .change_context(FiltersError::QueryExecutionFailure)
+}
+
+#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
+pub struct ApiEventFilter {
+ pub status_code: Option,
+ pub flow_type: Option,
+ pub api_flow: Option,
+}
diff --git a/crates/analytics/src/api_event/metrics.rs b/crates/analytics/src/api_event/metrics.rs
new file mode 100644
index 000000000000..16f2d7a2f5ab
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics.rs
@@ -0,0 +1,110 @@
+use api_models::analytics::{
+ api_event::{
+ ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
+ },
+ Granularity, TimeRange,
+};
+use time::PrimitiveDateTime;
+
+use crate::{
+ query::{Aggregate, GroupByClause, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
+};
+
+mod api_count;
+pub mod latency;
+mod status_code_count;
+use api_count::ApiCount;
+use latency::MaxLatency;
+use status_code_count::StatusCodeCount;
+
+use self::latency::LatencyAvg;
+
+#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+pub struct ApiEventMetricRow {
+ pub latency: Option,
+ pub api_count: Option,
+ pub status_code_count: Option,
+ #[serde(with = "common_utils::custom_serde::iso8601::option")]
+ pub start_bucket: Option,
+ #[serde(with = "common_utils::custom_serde::iso8601::option")]
+ pub end_bucket: Option,
+}
+
+pub trait ApiEventMetricAnalytics: LoadRow + LoadRow {}
+
+#[async_trait::async_trait]
+pub trait ApiEventMetric
+where
+ T: AnalyticsDataSource + ApiEventMetricAnalytics,
+{
+ async fn load_metrics(
+ &self,
+ dimensions: &[ApiEventDimensions],
+ merchant_id: &str,
+ filters: &ApiEventFilters,
+ granularity: &Option,
+ time_range: &TimeRange,
+ pool: &T,
+ ) -> MetricsResult>;
+}
+
+#[async_trait::async_trait]
+impl ApiEventMetric for ApiEventMetrics
+where
+ T: AnalyticsDataSource + ApiEventMetricAnalytics,
+ PrimitiveDateTime: ToSql,
+ AnalyticsCollection: ToSql,
+ Granularity: GroupByClause,
+ Aggregate<&'static str>: ToSql,
+ Window<&'static str>: ToSql,
+{
+ async fn load_metrics(
+ &self,
+ dimensions: &[ApiEventDimensions],
+ merchant_id: &str,
+ filters: &ApiEventFilters,
+ granularity: &Option,
+ time_range: &TimeRange,
+ pool: &T,
+ ) -> MetricsResult> {
+ match self {
+ Self::Latency => {
+ MaxLatency
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::ApiCount => {
+ ApiCount
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ Self::StatusCodeCount => {
+ StatusCodeCount
+ .load_metrics(
+ dimensions,
+ merchant_id,
+ filters,
+ granularity,
+ time_range,
+ pool,
+ )
+ .await
+ }
+ }
+ }
+}
diff --git a/crates/analytics/src/api_event/metrics/api_count.rs b/crates/analytics/src/api_event/metrics/api_count.rs
new file mode 100644
index 000000000000..7f5f291aa53e
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/api_count.rs
@@ -0,0 +1,106 @@
+use api_models::analytics::{
+ api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+ Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct ApiCount;
+
+#[async_trait::async_trait]
+impl super::ApiEventMetric for ApiCount
+where
+ T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+ PrimitiveDateTime: ToSql,
+ AnalyticsCollection: ToSql,
+ Granularity: GroupByClause,
+ Aggregate<&'static str>: ToSql,
+ Window<&'static str>: ToSql,
+{
+ async fn load_metrics(
+ &self,
+ _dimensions: &[ApiEventDimensions],
+ merchant_id: &str,
+ filters: &ApiEventFilters,
+ granularity: &Option,
+ time_range: &TimeRange,
+ pool: &T,
+ ) -> MetricsResult> {
+ let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+ query_builder
+ .add_select_column(Aggregate::Count {
+ field: None,
+ alias: Some("api_count"),
+ })
+ .switch()?;
+ if !filters.flow_type.is_empty() {
+ query_builder
+ .add_filter_in_range_clause(ApiEventDimensions::FlowType, &filters.flow_type)
+ .attach_printable("Error adding flow_type filter")
+ .switch()?;
+ }
+ query_builder
+ .add_select_column(Aggregate::Min {
+ field: "created_at",
+ alias: Some("start_bucket"),
+ })
+ .switch()?;
+ query_builder
+ .add_select_column(Aggregate::Max {
+ field: "created_at",
+ alias: Some("end_bucket"),
+ })
+ .switch()?;
+ if let Some(granularity) = granularity.as_ref() {
+ granularity
+ .set_group_by_clause(&mut query_builder)
+ .attach_printable("Error adding granularity")
+ .switch()?;
+ }
+
+ query_builder
+ .add_filter_clause("merchant_id", merchant_id)
+ .switch()?;
+
+ time_range
+ .set_filter_clause(&mut query_builder)
+ .attach_printable("Error filtering time range")
+ .switch()?;
+
+ query_builder
+ .execute_query::(pool)
+ .await
+ .change_context(MetricsError::QueryBuildingError)?
+ .change_context(MetricsError::QueryExecutionFailure)?
+ .into_iter()
+ .map(|i| {
+ Ok((
+ ApiEventMetricsBucketIdentifier::new(TimeRange {
+ start_time: match (granularity, i.start_bucket) {
+ (Some(g), Some(st)) => g.clip_to_start(st)?,
+ _ => time_range.start_time,
+ },
+ end_time: granularity.as_ref().map_or_else(
+ || Ok(time_range.end_time),
+ |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+ )?,
+ }),
+ i,
+ ))
+ })
+ .collect::,
+ crate::query::PostProcessingError,
+ >>()
+ .change_context(MetricsError::PostProcessingFailure)
+ }
+}
diff --git a/crates/analytics/src/api_event/metrics/latency.rs b/crates/analytics/src/api_event/metrics/latency.rs
new file mode 100644
index 000000000000..379b39fbeb9e
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/latency.rs
@@ -0,0 +1,138 @@
+use api_models::analytics::{
+ api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+ Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+ query::{
+ Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
+ Window,
+ },
+ types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct MaxLatency;
+
+#[async_trait::async_trait]
+impl super::ApiEventMetric for MaxLatency
+where
+ T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+ PrimitiveDateTime: ToSql,
+ AnalyticsCollection: ToSql,
+ Granularity: GroupByClause,
+ Aggregate<&'static str>: ToSql,
+ Window<&'static str>: ToSql,
+{
+ async fn load_metrics(
+ &self,
+ _dimensions: &[ApiEventDimensions],
+ merchant_id: &str,
+ filters: &ApiEventFilters,
+ granularity: &Option,
+ time_range: &TimeRange,
+ pool: &T,
+ ) -> MetricsResult> {
+ let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+ query_builder
+ .add_select_column(Aggregate::Sum {
+ field: "latency",
+ alias: Some("latency_sum"),
+ })
+ .switch()?;
+
+ query_builder
+ .add_select_column(Aggregate::Count {
+ field: Some("latency"),
+ alias: Some("latency_count"),
+ })
+ .switch()?;
+
+ query_builder
+ .add_select_column(Aggregate::Min {
+ field: "created_at",
+ alias: Some("start_bucket"),
+ })
+ .switch()?;
+ query_builder
+ .add_select_column(Aggregate::Max {
+ field: "created_at",
+ alias: Some("end_bucket"),
+ })
+ .switch()?;
+ if let Some(granularity) = granularity.as_ref() {
+ granularity
+ .set_group_by_clause(&mut query_builder)
+ .attach_printable("Error adding granularity")
+ .switch()?;
+ }
+
+ filters.set_filter_clause(&mut query_builder).switch()?;
+
+ query_builder
+ .add_filter_clause("merchant_id", merchant_id)
+ .switch()?;
+
+ time_range
+ .set_filter_clause(&mut query_builder)
+ .attach_printable("Error filtering time range")
+ .switch()?;
+
+ query_builder
+ .add_custom_filter_clause("request", "10.63.134.6", FilterTypes::NotLike)
+ .attach_printable("Error filtering out locker IP")
+ .switch()?;
+
+ query_builder
+ .execute_query::(pool)
+ .await
+ .change_context(MetricsError::QueryBuildingError)?
+ .change_context(MetricsError::QueryExecutionFailure)?
+ .into_iter()
+ .map(|i| {
+ Ok((
+ ApiEventMetricsBucketIdentifier::new(TimeRange {
+ start_time: match (granularity, i.start_bucket) {
+ (Some(g), Some(st)) => g.clip_to_start(st)?,
+ _ => time_range.start_time,
+ },
+ end_time: granularity.as_ref().map_or_else(
+ || Ok(time_range.end_time),
+ |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+ )?,
+ }),
+ ApiEventMetricRow {
+ latency: if i.latency_count != 0 {
+ Some(i.latency_sum.unwrap_or(0) / i.latency_count)
+ } else {
+ None
+ },
+ api_count: None,
+ status_code_count: None,
+ start_bucket: i.start_bucket,
+ end_bucket: i.end_bucket,
+ },
+ ))
+ })
+ .collect::,
+ crate::query::PostProcessingError,
+ >>()
+ .change_context(MetricsError::PostProcessingFailure)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+pub struct LatencyAvg {
+ latency_sum: Option,
+ latency_count: u64,
+ #[serde(with = "common_utils::custom_serde::iso8601::option")]
+ pub start_bucket: Option,
+ #[serde(with = "common_utils::custom_serde::iso8601::option")]
+ pub end_bucket: Option,
+}
diff --git a/crates/analytics/src/api_event/metrics/status_code_count.rs b/crates/analytics/src/api_event/metrics/status_code_count.rs
new file mode 100644
index 000000000000..5c652fd8e0c9
--- /dev/null
+++ b/crates/analytics/src/api_event/metrics/status_code_count.rs
@@ -0,0 +1,103 @@
+use api_models::analytics::{
+ api_event::{ApiEventDimensions, ApiEventFilters, ApiEventMetricsBucketIdentifier},
+ Granularity, TimeRange,
+};
+use common_utils::errors::ReportSwitchExt;
+use error_stack::ResultExt;
+use time::PrimitiveDateTime;
+
+use super::ApiEventMetricRow;
+use crate::{
+ query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
+ types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
+};
+
+#[derive(Default)]
+pub(super) struct StatusCodeCount;
+
+#[async_trait::async_trait]
+impl super::ApiEventMetric for StatusCodeCount
+where
+ T: AnalyticsDataSource + super::ApiEventMetricAnalytics,
+ PrimitiveDateTime: ToSql,
+ AnalyticsCollection: ToSql,
+ Granularity: GroupByClause,
+ Aggregate<&'static str>: ToSql,
+ Window<&'static str>: ToSql,
+{
+ async fn load_metrics(
+ &self,
+ _dimensions: &[ApiEventDimensions],
+ merchant_id: &str,
+ filters: &ApiEventFilters,
+ granularity: &Option,
+ time_range: &TimeRange,
+ pool: &T,
+ ) -> MetricsResult> {
+ let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::ApiEvents);
+
+ query_builder
+ .add_select_column(Aggregate::Count {
+ field: Some("status_code"),
+ alias: Some("status_code_count"),
+ })
+ .switch()?;
+
+ filters.set_filter_clause(&mut query_builder).switch()?;
+
+ query_builder
+ .add_filter_clause("merchant_id", merchant_id)
+ .switch()?;
+
+ time_range
+ .set_filter_clause(&mut query_builder)
+ .attach_printable("Error filtering time range")
+ .switch()?;
+
+ query_builder
+ .add_select_column(Aggregate::Min {
+ field: "created_at",
+ alias: Some("start_bucket"),
+ })
+ .switch()?;
+ query_builder
+ .add_select_column(Aggregate::Max {
+ field: "created_at",
+ alias: Some("end_bucket"),
+ })
+ .switch()?;
+ if let Some(granularity) = granularity.as_ref() {
+ granularity
+ .set_group_by_clause(&mut query_builder)
+ .attach_printable("Error adding granularity")
+ .switch()?;
+ }
+
+ query_builder
+ .execute_query::(pool)
+ .await
+ .change_context(MetricsError::QueryBuildingError)?
+ .change_context(MetricsError::QueryExecutionFailure)?
+ .into_iter()
+ .map(|i| {
+ Ok((
+ ApiEventMetricsBucketIdentifier::new(TimeRange {
+ start_time: match (granularity, i.start_bucket) {
+ (Some(g), Some(st)) => g.clip_to_start(st)?,
+ _ => time_range.start_time,
+ },
+ end_time: granularity.as_ref().map_or_else(
+ || Ok(time_range.end_time),
+ |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
+ )?,
+ }),
+ i,
+ ))
+ })
+ .collect::,
+ crate::query::PostProcessingError,
+ >>()
+ .change_context(MetricsError::PostProcessingFailure)
+ }
+}
diff --git a/crates/analytics/src/api_event/types.rs b/crates/analytics/src/api_event/types.rs
new file mode 100644
index 000000000000..72205fc72abf
--- /dev/null
+++ b/crates/analytics/src/api_event/types.rs
@@ -0,0 +1,33 @@
+use api_models::analytics::api_event::{ApiEventDimensions, ApiEventFilters};
+use error_stack::ResultExt;
+
+use crate::{
+ query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
+ types::{AnalyticsCollection, AnalyticsDataSource},
+};
+
+impl QueryFilter for ApiEventFilters
+where
+ T: AnalyticsDataSource,
+ AnalyticsCollection: ToSql,
+{
+ fn set_filter_clause(&self, builder: &mut QueryBuilder) -> QueryResult<()> {
+ if !self.status_code.is_empty() {
+ builder
+ .add_filter_in_range_clause(ApiEventDimensions::StatusCode, &self.status_code)
+ .attach_printable("Error adding status_code filter")?;
+ }
+ if !self.flow_type.is_empty() {
+ builder
+ .add_filter_in_range_clause(ApiEventDimensions::FlowType, &self.flow_type)
+ .attach_printable("Error adding flow_type filter")?;
+ }
+ if !self.api_flow.is_empty() {
+ builder
+ .add_filter_in_range_clause(ApiEventDimensions::ApiFlow, &self.api_flow)
+ .attach_printable("Error adding api_name filter")?;
+ }
+
+ Ok(())
+ }
+}
diff --git a/crates/analytics/src/clickhouse.rs b/crates/analytics/src/clickhouse.rs
new file mode 100644
index 000000000000..27d423505090
--- /dev/null
+++ b/crates/analytics/src/clickhouse.rs
@@ -0,0 +1,506 @@
+use std::sync::Arc;
+
+use actix_web::http::StatusCode;
+use common_utils::errors::ParsingError;
+use error_stack::{IntoReport, Report, ResultExt};
+use router_env::logger;
+use time::PrimitiveDateTime;
+
+use super::{
+ health_check::HealthCheck,
+ payments::{
+ distribution::PaymentDistributionRow, filters::FilterRow, metrics::PaymentMetricRow,
+ },
+ query::{Aggregate, ToSql, Window},
+ refunds::{filters::RefundFilterRow, metrics::RefundMetricRow},
+ sdk_events::{filters::SdkEventFilter, metrics::SdkEventMetricRow},
+ types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, QueryExecutionError},
+};
+use crate::{
+ api_event::{
+ events::ApiLogsResult,
+ filters::ApiEventFilter,
+ metrics::{latency::LatencyAvg, ApiEventMetricRow},
+ },
+ connector_events::events::ConnectorEventsResult,
+ outgoing_webhook_event::events::OutgoingWebhookLogsResult,
+ sdk_events::events::SdkEventsResult,
+ types::TableEngine,
+};
+
+pub type ClickhouseResult = error_stack::Result;
+
+#[derive(Clone, Debug)]
+pub struct ClickhouseClient {
+ pub config: Arc,
+}
+
+#[derive(Clone, Debug, serde::Deserialize)]
+pub struct ClickhouseConfig {
+ username: String,
+ password: Option,
+ host: String,
+ database_name: String,
+}
+
+impl Default for ClickhouseConfig {
+ fn default() -> Self {
+ Self {
+ username: "default".to_string(),
+ password: None,
+ host: "http://localhost:8123".to_string(),
+ database_name: "default".to_string(),
+ }
+ }
+}
+
+impl ClickhouseClient {
+ async fn execute_query(&self, query: &str) -> ClickhouseResult> {
+ logger::debug!("Executing query: {query}");
+ let client = reqwest::Client::new();
+ let params = CkhQuery {
+ date_time_output_format: String::from("iso"),
+ output_format_json_quote_64bit_integers: 0,
+ database: self.config.database_name.clone(),
+ };
+ let response = client
+ .post(&self.config.host)
+ .query(¶ms)
+ .basic_auth(self.config.username.clone(), self.config.password.clone())
+ .body(format!("{query}\nFORMAT JSON"))
+ .send()
+ .await
+ .into_report()
+ .change_context(ClickhouseError::ConnectionError)?;
+
+ logger::debug!(clickhouse_response=?response, query=?query, "Clickhouse response");
+ if response.status() != StatusCode::OK {
+ response.text().await.map_or_else(
+ |er| {
+ Err(ClickhouseError::ResponseError)
+ .into_report()
+ .attach_printable_lazy(|| format!("Error: {er:?}"))
+ },
+ |t| Err(ClickhouseError::ResponseNotOK(t)).into_report(),
+ )
+ } else {
+ Ok(response
+ .json::>()
+ .await
+ .into_report()
+ .change_context(ClickhouseError::ResponseError)?
+ .data)
+ }
+ }
+}
+
+#[async_trait::async_trait]
+impl HealthCheck for ClickhouseClient {
+ async fn deep_health_check(
+ &self,
+ ) -> common_utils::errors::CustomResult<(), QueryExecutionError> {
+ self.execute_query("SELECT 1")
+ .await
+ .map(|_| ())
+ .change_context(QueryExecutionError::DatabaseError)
+ }
+}
+
+#[async_trait::async_trait]
+impl AnalyticsDataSource for ClickhouseClient {
+ type Row = serde_json::Value;
+
+ async fn load_results(
+ &self,
+ query: &str,
+ ) -> common_utils::errors::CustomResult, QueryExecutionError>
+ where
+ Self: LoadRow,
+ {
+ self.execute_query(query)
+ .await
+ .change_context(QueryExecutionError::DatabaseError)?
+ .into_iter()
+ .map(Self::load_row)
+ .collect::, _>>()
+ .change_context(QueryExecutionError::RowExtractionFailure)
+ }
+
+ fn get_table_engine(table: AnalyticsCollection) -> TableEngine {
+ match table {
+ AnalyticsCollection::Payment
+ | AnalyticsCollection::Refund
+ | AnalyticsCollection::PaymentIntent => {
+ TableEngine::CollapsingMergeTree { sign: "sign_flag" }
+ }
+ AnalyticsCollection::SdkEvents => TableEngine::BasicTree,
+ AnalyticsCollection::ApiEvents => TableEngine::BasicTree,
+ AnalyticsCollection::ConnectorEvents => TableEngine::BasicTree,
+ AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree,
+ }
+ }
+}
+
+impl LoadRow for ClickhouseClient
+where
+ Self::Row: TryInto>,
+{
+ fn load_row(row: Self::Row) -> common_utils::errors::CustomResult {
+ row.try_into()
+ .change_context(QueryExecutionError::RowExtractionFailure)
+ }
+}
+
+impl super::payments::filters::PaymentFilterAnalytics for ClickhouseClient {}
+impl super::payments::metrics::PaymentMetricAnalytics for ClickhouseClient {}
+impl super::payments::distribution::PaymentDistributionAnalytics for ClickhouseClient {}
+impl super::refunds::metrics::RefundMetricAnalytics for ClickhouseClient {}
+impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {}
+impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {}
+impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {}
+impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {}
+impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {}
+impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {}
+impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {}
+impl super::connector_events::events::ConnectorEventLogAnalytics for ClickhouseClient {}
+impl super::outgoing_webhook_event::events::OutgoingWebhookLogsFilterAnalytics
+ for ClickhouseClient
+{
+}
+
+#[derive(Debug, serde::Serialize)]
+struct CkhQuery {
+ date_time_output_format: String,
+ output_format_json_quote_64bit_integers: u8,
+ database: String,
+}
+
+#[derive(Debug, serde::Deserialize)]
+struct CkhOutput {
+ data: Vec,
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ApiLogsResult in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse SdkEventsResult in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ConnectorEventsResult in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse PaymentMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse PaymentDistributionRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse FilterRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse RefundMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse RefundFilterRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ApiEventMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse LatencyAvg in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse SdkEventMetricRow in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse SdkEventFilter in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse ApiEventFilter in clickhouse results",
+ ))
+ }
+}
+
+impl TryInto for serde_json::Value {
+ type Error = Report;
+
+ fn try_into(self) -> Result {
+ serde_json::from_value(self)
+ .into_report()
+ .change_context(ParsingError::StructParseFailure(
+ "Failed to parse OutgoingWebhookLogsResult in clickhouse results",
+ ))
+ }
+}
+
+impl ToSql for PrimitiveDateTime {
+ fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result {
+ let format =
+ time::format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]")
+ .into_report()
+ .change_context(ParsingError::DateTimeParsingError)
+ .attach_printable("Failed to parse format description")?;
+ self.format(&format)
+ .into_report()
+ .change_context(ParsingError::EncodeError(
+ "failed to encode to clickhouse date-time format",
+ ))
+ .attach_printable("Failed to format date time")
+ }
+}
+
+impl ToSql for AnalyticsCollection {
+ fn to_sql(&self, _table_engine: &TableEngine) -> error_stack::Result {
+ match self {
+ Self::Payment => Ok("payment_attempts".to_string()),
+ Self::Refund => Ok("refunds".to_string()),
+ Self::SdkEvents => Ok("sdk_events_audit".to_string()),
+ Self::ApiEvents => Ok("api_events_audit".to_string()),
+ Self::PaymentIntent => Ok("payment_intents".to_string()),
+ Self::ConnectorEvents => Ok("connector_events_audit".to_string()),
+ Self::OutgoingWebhookEvent => Ok("outgoing_webhook_events_audit".to_string()),
+ }
+ }
+}
+
+impl ToSql for Aggregate
+where
+ T: ToSql,
+{
+ fn to_sql(&self, table_engine: &TableEngine) -> error_stack::Result {
+ Ok(match self {
+ Self::Count { field: _, alias } => {
+ let query = match table_engine {
+ TableEngine::CollapsingMergeTree { sign } => format!("sum({sign})"),
+ TableEngine::BasicTree => "count(*)".to_string(),
+ };
+ format!(
+ "{query}{}",
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::Sum { field, alias } => {
+ let query = match table_engine {
+ TableEngine::CollapsingMergeTree { sign } => format!(
+ "sum({sign} * {})",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to sum aggregate")?
+ ),
+ TableEngine::BasicTree => format!(
+ "sum({})",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to sum aggregate")?
+ ),
+ };
+ format!(
+ "{query}{}",
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::Min { field, alias } => {
+ format!(
+ "min({}){}",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to min aggregate")?,
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ Self::Max { field, alias } => {
+ format!(
+ "max({}){}",
+ field
+ .to_sql(table_engine)
+ .attach_printable("Failed to max aggregate")?,
+ alias.map_or_else(|| "".to_owned(), |alias| format!(" as {}", alias))
+ )
+ }
+ })
+ }
+}
+
+impl