Skip to content

Commit

Permalink
feat(analytics): Add Clickhouse based analytics (#2988)
Browse files Browse the repository at this point in the history
Co-authored-by: harsh_sharma_juspay <[email protected]>
Co-authored-by: Ivor Dsouza <[email protected]>
Co-authored-by: Chethan Rao <[email protected]>
Co-authored-by: nain-F49FF806 <[email protected]>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: hyperswitch-bot[bot] <148525504+hyperswitch-bot[bot]@users.noreply.github.com>
Co-authored-by: akshay.s <[email protected]>
Co-authored-by: Gnanasundari24 <[email protected]>
  • Loading branch information
9 people authored Nov 29, 2023
1 parent 2e57745 commit 9df4e01
Show file tree
Hide file tree
Showing 135 changed files with 12,141 additions and 897 deletions.
121 changes: 120 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM rust:slim-bookworm as builder
FROM rust:bookworm as builder

ARG EXTRA_FEATURES=""

Expand Down Expand Up @@ -36,7 +36,7 @@ RUN cargo build --release --features release ${EXTRA_FEATURES}



FROM debian:bookworm-slim
FROM debian:bookworm

# Placing config and binary executable in different directories
ARG CONFIG_DIR=/local/config
Expand Down
30 changes: 30 additions & 0 deletions config/development.toml
Original file line number Diff line number Diff line change
Expand Up @@ -475,3 +475,33 @@ delay_between_retries_in_milliseconds = 500

[kv_config]
ttl = 900 # 15 * 60 seconds

[events]
source = "logs"

[events.kafka]
brokers = ["localhost:9092"]
intent_analytics_topic = "hyperswitch-payment-intent-events"
attempt_analytics_topic = "hyperswitch-payment-attempt-events"
refund_analytics_topic = "hyperswitch-refund-events"
api_logs_topic = "hyperswitch-api-log-events"
connector_events_topic = "hyperswitch-connector-api-events"

[analytics]
source = "sqlx"

[analytics.clickhouse]
username = "default"
# password = ""
host = "http://localhost:8123"
database_name = "default"

[analytics.sqlx]
username = "db_user"
password = "db_pass"
host = "localhost"
port = 5432
dbname = "hyperswitch_db"
pool_size = 5
connection_timeout = 10
queue_strategy = "Fifo"
18 changes: 17 additions & 1 deletion config/docker_compose.toml
Original file line number Diff line number Diff line change
Expand Up @@ -333,16 +333,32 @@ supported_connectors = "braintree"
redis_lock_expiry_seconds = 180 # 3 * 60 seconds
delay_between_retries_in_milliseconds = 500

[events.kafka]
brokers = ["localhost:9092"]
intent_analytics_topic = "hyperswitch-payment-intent-events"
attempt_analytics_topic = "hyperswitch-payment-attempt-events"
refund_analytics_topic = "hyperswitch-refund-events"
api_logs_topic = "hyperswitch-api-log-events"
connector_events_topic = "hyperswitch-connector-api-events"

[analytics]
source = "sqlx"

[analytics.clickhouse]
username = "default"
# password = ""
host = "http://localhost:8123"
database_name = "default"

[analytics.sqlx]
username = "db_user"
password = "db_pass"
host = "pg"
host = "localhost"
port = 5432
dbname = "hyperswitch_db"
pool_size = 5
connection_timeout = 10
queue_strategy = "Fifo"

[kv_config]
ttl = 900 # 15 * 60 seconds
37 changes: 37 additions & 0 deletions crates/analytics/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
[package]
name = "analytics"
version = "0.1.0"
description = "Analytics / Reports related functionality"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


[dependencies]
# First party crates
api_models = { version = "0.1.0", path = "../api_models" , features = ["errors"]}
storage_impl = { version = "0.1.0", path = "../storage_impl", default-features = false }
common_utils = { version = "0.1.0", path = "../common_utils"}
external_services = { version = "0.1.0", path = "../external_services", default-features = false}
masking = { version = "0.1.0", path = "../masking" }
router_env = { version = "0.1.0", path = "../router_env", features = ["log_extra_implicit_fields", "log_custom_entries_to_extra"] }
diesel_models = { version = "0.1.0", path = "../diesel_models", features = ["kv_store"] }

#Third Party dependencies
actix-web = "4.3.1"
async-trait = "0.1.68"
aws-config = { version = "0.55.3" }
aws-sdk-lambda = { version = "0.28.0" }
aws-smithy-types = { version = "0.55.3" }
bigdecimal = { version = "0.3.1", features = ["serde"] }
error-stack = "0.3.1"
futures = "0.3.28"
once_cell = "1.18.0"
reqwest = { version = "0.11.18", features = ["serde_json"] }
serde = { version = "1.0.163", features = ["derive", "rc"] }
serde_json = "1.0.96"
sqlx = { version = "0.6.3", features = ["postgres", "runtime-actix", "runtime-actix-native-tls", "time", "bigdecimal"] }
strum = { version = "0.25.0", features = ["derive"] }
thiserror = "1.0.43"
time = { version = "0.3.21", features = ["serde", "serde-well-known", "std"] }
tokio = { version = "1.28.2", features = ["macros", "rt-multi-thread"] }
45 changes: 45 additions & 0 deletions crates/analytics/docs/clickhouse/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#### Starting the containers

In our use case we rely on Kafka for ingesting events,
hence we can use Docker Compose to start all the components.

```
docker compose up -d clickhouse-server kafka-ui
```

> kafka-ui is a visual tool for inspecting Kafka, available on localhost:8090
#### Setting up Clickhouse

Once ClickHouse is up & running, you need to create the required tables for it.

You can either visit the URL (http://localhost:8123/play) on which the clickhouse-server is running to get a query playground.
Alternatively, you can open a shell inside the ClickHouse container & execute commands manually:
```
# On your local terminal
docker compose exec clickhouse-server bash
# Inside the clickhouse-server container shell
clickhouse-client --user default
# Inside the clickhouse-client shell
SHOW TABLES;
CREATE TABLE ......
```

The table creation scripts are provided [here](./scripts)

#### Running/Debugging your application
Once set up, you can run your application either via Docker Compose or normally via cargo run.

Remember to enable the kafka_events via development.toml/docker_compose.toml files

Inspect the [kafka-ui](http://localhost:8090) to check the messages being inserted in queue

If the messages/topics are available, then you can run select queries on your ClickHouse tables to ensure data is being populated.

If the data is not being populated in ClickHouse, you can check the error logs of the clickhouse-server via:
```
# Inside the clickhouse-server container shell
tail -f /var/log/clickhouse-server/clickhouse-server.err.log
```
Loading

0 comments on commit 9df4e01

Please sign in to comment.