diff --git a/.eslintrc.js b/.eslintrc.js index d3d6deb301d6f..a2db223beffc0 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -113,11 +113,7 @@ module.exports = { { name: 'dayjs', message: 'Do not directly import dayjs. Only import the dayjs exported from lib/dayjs.', - }, - { - name: '@ant-design/icons', - message: 'Please use icons from the @posthog/icons package instead', - }, + } ], }, ], diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml index 7bbe4b5147942..34c0a34cebeda 100644 --- a/.github/actions/run-backend-tests/action.yml +++ b/.github/actions/run-backend-tests/action.yml @@ -167,6 +167,12 @@ runs: --durations=100 --durations-min=1.0 --store-durations \ $PYTEST_ARGS + # Uncomment this code to create an ssh-able console so you can debug issues with github actions + # (Consider changing the timeout in ci-backend.yml to have more time) + # - name: Setup tmate session + # if: failure() + # uses: mxschmitt/action-tmate@v3 + - name: Run /decide read replica tests id: run-decide-read-replica-tests if: ${{ inputs.segment == 'Core' && inputs.group == 1 && inputs.person-on-events != 'true' }} diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index 6155740676e03..fb15ed052a75a 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -224,6 +224,7 @@ jobs: django: needs: changes + # increase for tmate testing timeout-minutes: 30 name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }}) diff --git a/.github/workflows/ci-plugin-server.yml b/.github/workflows/ci-plugin-server.yml index a24eaf53d4e69..03888d2268bc7 100644 --- a/.github/workflows/ci-plugin-server.yml +++ b/.github/workflows/ci-plugin-server.yml @@ -57,7 +57,6 @@ jobs: defaults: run: working-directory: 'plugin-server' - steps: - uses: actions/checkout@v3 @@ -82,6 +81,7 @@ jobs: tests: name: Plugin Server Tests (${{matrix.shard}}) needs: changes + if: needs.changes.outputs.plugin-server == 'true' runs-on: ubuntu-latest strategy: @@ -97,21 +97,17 @@ jobs: steps: - name: Code check out - if: needs.changes.outputs.plugin-server == 'true' uses: actions/checkout@v3 - name: Stop/Start stack with Docker Compose - if: needs.changes.outputs.plugin-server == 'true' run: | docker compose -f docker-compose.dev.yml down docker compose -f docker-compose.dev.yml up -d - name: Add Kafka to /etc/hosts - if: needs.changes.outputs.plugin-server == 'true' run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts - name: Set up Python - if: needs.changes.outputs.plugin-server == 'true' uses: actions/setup-python@v5 with: python-version: 3.11.9 @@ -122,24 +118,35 @@ jobs: # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv + - name: Install rust + uses: dtolnay/rust-toolchain@1.77 + + - uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + rust/target + key: ${{ runner.os }}-cargo-release-${{ hashFiles('**/Cargo.lock') }} + + - name: Install sqlx-cli + working-directory: rust + run: cargo install sqlx-cli@0.7.3 --no-default-features --features native-tls,postgres + - name: Install SAML (python3-saml) dependencies - if: needs.changes.outputs.plugin-server == 'true' run: | sudo apt-get update sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl - name: Install python dependencies - if: 
needs.changes.outputs.plugin-server == 'true' run: | uv pip install --system -r requirements-dev.txt uv pip install --system -r requirements.txt - name: Install pnpm - if: needs.changes.outputs.plugin-server == 'true' uses: pnpm/action-setup@v4 - name: Set up Node.js - if: needs.changes.outputs.plugin-server == 'true' uses: actions/setup-node@v4 with: node-version: 18.12.1 @@ -147,17 +154,14 @@ jobs: cache-dependency-path: plugin-server/pnpm-lock.yaml - name: Install package.json dependencies with pnpm - if: needs.changes.outputs.plugin-server == 'true' run: cd plugin-server && pnpm i - name: Wait for Clickhouse, Redis & Kafka - if: needs.changes.outputs.plugin-server == 'true' run: | docker compose -f docker-compose.dev.yml up kafka redis clickhouse -d --wait bin/check_kafka_clickhouse_up - name: Set up databases - if: needs.changes.outputs.plugin-server == 'true' env: TEST: 'true' SECRET_KEY: 'abcdef' # unsafe - for testing only @@ -165,7 +169,6 @@ jobs: run: cd plugin-server && pnpm setup:test - name: Test with Jest - if: needs.changes.outputs.plugin-server == 'true' env: # Below DB name has `test_` prepended, as that's how Django (ran above) creates the test DB DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_posthog' diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml index 06741577bc8ba..8bb8380ad5a93 100644 --- a/.github/workflows/container-images-cd.yml +++ b/.github/workflows/container-images-cd.yml @@ -132,7 +132,7 @@ jobs: - name: Check for changes that affect batch exports temporal worker id: check_changes_batch_exports_temporal_worker run: | - echo "changed=$((git diff --name-only HEAD^ HEAD | grep -qE '^posthog/temporal/common|^posthog/temporal/batch_exports|^posthog/batch_exports/|^posthog/management/commands/start_temporal_worker.py$' && echo true) || echo false)" >> $GITHUB_OUTPUT + echo "changed=$((git diff --name-only HEAD^ HEAD | grep -qE '^posthog/temporal/common|^posthog/temporal/batch_exports|^posthog/batch_exports/|^posthog/management/commands/start_temporal_worker.py$|^requirements.txt$' && echo true) || echo false)" >> $GITHUB_OUTPUT - name: Trigger Batch Exports Temporal Worker Cloud deployment if: steps.check_changes_batch_exports_temporal_worker.outputs.changed == 'true' @@ -158,7 +158,7 @@ jobs: - name: Check for changes that affect general purpose temporal worker id: check_changes_general_purpose_temporal_worker run: | - echo "changed=$((git diff --name-only HEAD^ HEAD | grep -qE '^posthog/temporal/common|^posthog/temporal/proxy_service|^posthog/management/commands/start_temporal_worker.py$' && echo true) || echo false)" >> $GITHUB_OUTPUT + echo "changed=$((git diff --name-only HEAD^ HEAD | grep -qE '^posthog/temporal/common|^posthog/temporal/proxy_service|^posthog/management/commands/start_temporal_worker.py$|^requirements.txt$' && echo true) || echo false)" >> $GITHUB_OUTPUT - name: Trigger General Purpose Temporal Worker Cloud deployment if: steps.check_changes_general_purpose_temporal_worker.outputs.changed == 'true' @@ -184,7 +184,7 @@ jobs: - name: Check for changes that affect data warehouse temporal worker id: check_changes_data_warehouse_temporal_worker run: | - echo "changed=$((git diff --name-only HEAD^ HEAD | grep -qE '^posthog/temporal/common|^posthog/temporal/data_imports|^posthog/warehouse/|^posthog/management/commands/start_temporal_worker.py$' && echo true) || echo false)" >> $GITHUB_OUTPUT + echo "changed=$((git diff --name-only HEAD^ HEAD | grep -qE 
'^posthog/temporal/common|^posthog/temporal/data_imports|^posthog/warehouse/|^posthog/management/commands/start_temporal_worker.py$|^requirements.txt$' && echo true) || echo false)" >> $GITHUB_OUTPUT - name: Trigger Data Warehouse Temporal Worker Cloud deployment if: steps.check_changes_data_warehouse_temporal_worker.outputs.changed == 'true' diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index f4d14e9ed49ce..8849e6aefe280 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -1,4 +1,4 @@ -name: Rust test CI +name: Rust CI on: workflow_dispatch: @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 if: github.repository == 'PostHog/posthog' - name: Determine need to run rust checks + name: Determine need to run Rust checks # Set job outputs to values from filter step outputs: rust: ${{ steps.filter.outputs.rust }} @@ -40,7 +40,7 @@ jobs: - 'ee/migrations/**' build: - name: Build rust services + name: Build Rust services needs: changes runs-on: depot-ubuntu-22.04-4 @@ -76,7 +76,7 @@ jobs: run: cargo build --all --locked --release && find target/release/ -maxdepth 1 -executable -type f | xargs strip test: - name: Test rust services + name: Test Rust services strategy: matrix: package: @@ -179,7 +179,7 @@ jobs: echo "Cargo test completed" linting: - name: Lint rust services + name: Lint Rust services needs: changes runs-on: depot-ubuntu-22.04-4 @@ -225,7 +225,7 @@ jobs: run: cargo check --all-features shear: - name: Shear rust services + name: Shear Rust services needs: changes runs-on: depot-ubuntu-22.04-4 diff --git a/.storybook/main.ts b/.storybook/main.ts index 097de03c004da..d9f3bc19feb4c 100644 --- a/.storybook/main.ts +++ b/.storybook/main.ts @@ -13,7 +13,7 @@ const config: StorybookConfig = { 'storybook-addon-pseudo-states', ], - staticDirs: ['public'], + staticDirs: ['public', { from: '../frontend/public', to: '/static' }], webpackFinal: (config) => { const mainConfig = createEntry('main') diff --git a/.vscode/launch.json b/.vscode/launch.json index 389be51af0c57..88f00c46c9502 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -119,7 +119,8 @@ "WORKER_CONCURRENCY": "2", "OBJECT_STORAGE_ENABLED": "True", "HOG_HOOK_URL": "http://localhost:3300/hoghook", - "CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS": "" + "CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS": "", + "CDP_CYCLOTRON_ENABLED_TEAMS": "*" }, "presentation": { "group": "main" diff --git a/README.md b/README.md index 6776b643722fd..133575109f45e 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,7 @@ - Specify events manually, or use autocapture to get started quickly - Analyze data with ready-made visualizations, or do it yourself with SQL +- Track website visitors separately with our GA4 alternative - Only capture properties on the people you want to track, save money when you don't - Gather insights by capturing session replays, console logs, and network monitoring - Improve your product with Experiments that automatically analyze performance @@ -35,7 +36,7 @@ - Connect to external services and manage data flows with PostHog CDP PostHog is available with hosting in the EU or US and is fully SOC 2 compliant. 
 It's free to get started and comes with a generous monthly free tier:
 
-- 1 million product analytics events
+- 1 million events
 - 5k session replays
 - 1 million feature flag requests
 - 250 survey responses
diff --git a/bin/migrate b/bin/migrate
index 1c32b3b5b0614..2f2aa49ed749b 100755
--- a/bin/migrate
+++ b/bin/migrate
@@ -1,5 +1,11 @@
 #!/bin/bash
 set -e
+SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
+
+# NOTE: when running in Docker, the rust directory might not exist, so check for it
+if [ -d "$SCRIPT_DIR/../rust" ]; then
+    bash "$SCRIPT_DIR/../rust/bin/migrate-cyclotron"
+fi
 
 python manage.py migrate
 python manage.py migrate_clickhouse
diff --git a/bin/start-cyclotron b/bin/start-cyclotron
index 074ec4802d0a4..2885390287c0f 100755
--- a/bin/start-cyclotron
+++ b/bin/start-cyclotron
@@ -12,7 +12,7 @@ export RUST_LOG=${DEBUG:-debug}
 SQLX_QUERY_LEVEL=${SQLX_QUERY_LEVEL:-warn}
 export RUST_LOG=$RUST_LOG,sqlx::query=$SQLX_QUERY_LEVEL
 
-export DATABASE_URL=${DATABASE_URL:-postgres://posthog:posthog@localhost:5432/posthog}
+export DATABASE_URL=${CYCLOTRON_DATABASE_URL:-postgres://posthog:posthog@localhost:5432/cyclotron}
 export ALLOW_INTERNAL_IPS=${ALLOW_INTERNAL_IPS:-true}
 
 ./target/debug/cyclotron-fetch &
diff --git a/cypress/e2e/dashboard-shared.cy.ts b/cypress/e2e/dashboard-shared.cy.ts
index ba24a6a558aac..4e46554160424 100644
--- a/cypress/e2e/dashboard-shared.cy.ts
+++ b/cypress/e2e/dashboard-shared.cy.ts
@@ -62,7 +62,6 @@ describe('Shared dashboard', () => {
         cy.get('.InsightCard').should('have.length', 6)
 
         // Make sure no element with text "There are no matching events for this query" exists
-        // TODO this was failing, it shouldn't be but YOLO
-        // cy.get('.insight-empty-state').should('not.exist')
+        cy.get('.insight-empty-state').should('not.exist')
     })
 })
diff --git a/docker-compose.dev-full.yml b/docker-compose.dev-full.yml
index 96db120b76660..606d5f6a3bc9f 100644
--- a/docker-compose.dev-full.yml
+++ b/docker-compose.dev-full.yml
@@ -47,6 +47,8 @@ services:
             - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
             - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
             - ./docker/clickhouse/users-dev.xml:/etc/clickhouse-server/users.xml
+            - ./docker/clickhouse/user_defined_function.xml:/etc/clickhouse-server/user_defined_function.xml
+            - ./posthog/user_scripts:/var/lib/clickhouse/user_scripts
         depends_on:
             - kafka
             - zookeeper
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index d21b1af2ee75d..e9d698c2d3584 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -61,10 +61,15 @@ services:
             - '9440:9440'
             - '9009:9009'
         volumes:
+            # this new entrypoint file works around a bug detailed here: https://github.com/ClickHouse/ClickHouse/pull/59991
+            # revert this when we upgrade ClickHouse
+            - ./docker/clickhouse/entrypoint.sh:/entrypoint.sh
             - ./posthog/idl:/idl
             - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
             - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
            - ./docker/clickhouse/users-dev.xml:/etc/clickhouse-server/users.xml
+            - ./docker/clickhouse/user_defined_function.xml:/etc/clickhouse-server/user_defined_function.xml
+            - ./posthog/user_scripts:/var/lib/clickhouse/user_scripts
         extra_hosts:
             - 'host.docker.internal:host-gateway'
         depends_on:
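The two docker-compose changes above mount the funnel UDF definitions and the `posthog/user_scripts` directory into the dev ClickHouse container. One way to confirm the functions were actually registered is to query `system.functions` once the stack is up — a hypothetical check script, not part of this PR, assuming the `clickhouse-driver` package and that the native port 9000 is exposed as in the default dev setup:

```python
from clickhouse_driver import Client  # pip install clickhouse-driver

# Connect to the dev ClickHouse started by docker-compose.dev.yml.
client = Client(host="localhost", port=9000)

# The mounted user_defined_function.xml should register the aggregate_funnel* UDFs.
rows = client.execute(
    "SELECT name FROM system.functions WHERE name LIKE 'aggregate_funnel%'"
)
print(sorted(name for (name,) in rows))  # expect the eight aggregate_funnel* functions
```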
diff --git a/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh b/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh
index 0d4f4332c98ad..4141e3345d05b 100755
--- a/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh
+++ b/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh
@@ -1,4 +1,5 @@
 #!/bin/bash
 set -e
+apk add python3
 
 cp -r /idl/* /var/lib/clickhouse/format_schemas/
diff --git a/docker/clickhouse/entrypoint.sh b/docker/clickhouse/entrypoint.sh
new file mode 100755
index 0000000000000..7d247ab14ea2a
--- /dev/null
+++ b/docker/clickhouse/entrypoint.sh
@@ -0,0 +1,207 @@
+#!/bin/bash
+
+set -eo pipefail
+shopt -s nullglob
+
+DO_CHOWN=1
+if [ "${CLICKHOUSE_DO_NOT_CHOWN:-0}" = "1" ]; then
+    DO_CHOWN=0
+fi
+
+CLICKHOUSE_UID="${CLICKHOUSE_UID:-"$(id -u clickhouse)"}"
+CLICKHOUSE_GID="${CLICKHOUSE_GID:-"$(id -g clickhouse)"}"
+
+# support --user
+if [ "$(id -u)" = "0" ]; then
+    USER=$CLICKHOUSE_UID
+    GROUP=$CLICKHOUSE_GID
+else
+    USER="$(id -u)"
+    GROUP="$(id -g)"
+    DO_CHOWN=0
+fi
+
+# set some vars
+CLICKHOUSE_CONFIG="${CLICKHOUSE_CONFIG:-/etc/clickhouse-server/config.xml}"
+
+# get CH directories locations
+DATA_DIR="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=path || true)"
+TMP_DIR="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=tmp_path || true)"
+USER_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=user_files_path || true)"
+LOG_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=logger.log || true)"
+LOG_DIR=""
+if [ -n "$LOG_PATH" ]; then LOG_DIR="$(dirname "$LOG_PATH")"; fi
+ERROR_LOG_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=logger.errorlog || true)"
+ERROR_LOG_DIR=""
+if [ -n "$ERROR_LOG_PATH" ]; then ERROR_LOG_DIR="$(dirname "$ERROR_LOG_PATH")"; fi
+FORMAT_SCHEMA_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=format_schema_path || true)"
+
+# There could be many disks declared in config
+readarray -t DISKS_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.path' || true)
+readarray -t DISKS_METADATA_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.metadata_path' || true)
+
+CLICKHOUSE_USER="${CLICKHOUSE_USER:-default}"
+CLICKHOUSE_PASSWORD="${CLICKHOUSE_PASSWORD:-}"
+CLICKHOUSE_DB="${CLICKHOUSE_DB:-}"
+CLICKHOUSE_ACCESS_MANAGEMENT="${CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT:-0}"
+
+for dir in "$DATA_DIR" \
+    "$ERROR_LOG_DIR" \
+    "$LOG_DIR" \
+    "$TMP_DIR" \
+    "$USER_PATH" \
+    "$FORMAT_SCHEMA_PATH" \
+    "${DISKS_PATHS[@]}" \
+    "${DISKS_METADATA_PATHS[@]}"
+do
+    # check if variable not empty
+    [ -z "$dir" ] && continue
+    # ensure directories exist
+    if [ "$DO_CHOWN" = "1" ]; then
+        mkdir="mkdir"
+    else
+        # if DO_CHOWN=0 it means that the system does not map root user to "admin" permissions
+        # it mainly happens on NFS mounts where root==nobody for security reasons
+        # thus mkdir MUST run with user id/gid and not from nobody that has zero permissions
+        mkdir="/usr/bin/clickhouse su "${USER}:${GROUP}" mkdir"
+    fi
+    if ! $mkdir -p "$dir"; then
+        echo "Couldn't create necessary directory: $dir"
+        exit 1
+    fi
+
+    if [ "$DO_CHOWN" = "1" ]; then
+        # ensure proper directory permissions,
+        # but skip it if the directory already has the proper permissions, because recursive chown may be slow
+        if [ "$(stat -c %u "$dir")" != "$USER" ] || [ "$(stat -c %g "$dir")" != "$GROUP" ]; then
+            chown -R "$USER:$GROUP" "$dir"
+        fi
+    fi
+done
+
+# if clickhouse user is defined - create it (user "default" already exists out of the box)
+if [ -n "$CLICKHOUSE_USER" ] && [ "$CLICKHOUSE_USER" != "default" ] || [ -n "$CLICKHOUSE_PASSWORD" ] || [ "$CLICKHOUSE_ACCESS_MANAGEMENT" != "0" ]; then
+    echo "$0: create new user '$CLICKHOUSE_USER' instead of 'default'"
+    cat <<EOT > /etc/clickhouse-server/users.d/default-user.xml
+    <clickhouse>
+      <!-- Docs: <https://clickhouse.com/docs/en/operations/settings/settings_users/> -->
+      <users>
+        <!-- Remove default user -->
+        <default remove="remove">
+        </default>
+
+        <${CLICKHOUSE_USER}>
+          <profile>default</profile>
+          <networks>
+            <ip>::/0</ip>
+          </networks>
+          <password>${CLICKHOUSE_PASSWORD}</password>
+          <quota>default</quota>
+          <access_management>${CLICKHOUSE_ACCESS_MANAGEMENT}</access_management>
+        </${CLICKHOUSE_USER}>
+      </users>
+    </clickhouse>
+EOT
+fi
+
+CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS="${CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS:-}"
+
+# checking $DATA_DIR for initialization
+if [ -d "${DATA_DIR%/}/data" ]; then
+    DATABASE_ALREADY_EXISTS='true'
+fi
+
+# run initialization if the flag CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS is not empty or the data directory is empty
+if [[ -n "${CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS}" || -z "${DATABASE_ALREADY_EXISTS}" ]]; then
+    RUN_INITDB_SCRIPTS='true'
+fi
+
+if [ -n "${RUN_INITDB_SCRIPTS}" ]; then
+    if [ -n "$(ls /docker-entrypoint-initdb.d/)" ] || [ -n "$CLICKHOUSE_DB" ]; then
+        # port is needed to check if clickhouse-server is ready for connections
+        HTTP_PORT="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=http_port --try)"
+        HTTPS_PORT="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=https_port --try)"
+
+        if [ -n "$HTTP_PORT" ]; then
+            URL="http://127.0.0.1:$HTTP_PORT/ping"
+        else
+            URL="https://127.0.0.1:$HTTPS_PORT/ping"
+        fi
+
+        # Listen only on localhost until the initialization is done
+        /usr/bin/clickhouse su "${USER}:${GROUP}" /usr/bin/clickhouse-server --config-file="$CLICKHOUSE_CONFIG" -- --listen_host=127.0.0.1 &
+        pid="$!"
+
+        # check if clickhouse is ready to accept connections
+        # will try to ping clickhouse via http_port (max 1000 retries by default, with 1 sec timeout and 1 sec delay between retries)
+        tries=${CLICKHOUSE_INIT_TIMEOUT:-1000}
+        while ! wget --spider --no-check-certificate -T 1 -q "$URL" 2>/dev/null; do
+            if [ "$tries" -le "0" ]; then
+                echo >&2 'ClickHouse init process failed.'
+                exit 1
+            fi
+            tries=$(( tries-1 ))
+            sleep 1
+        done
+
+        clickhouseclient=( clickhouse-client --multiquery --host "127.0.0.1" -u "$CLICKHOUSE_USER" --password "$CLICKHOUSE_PASSWORD" )
+
+        echo
+
+        # create default database, if defined
+        if [ -n "$CLICKHOUSE_DB" ]; then
+            echo "$0: create database '$CLICKHOUSE_DB'"
+            "${clickhouseclient[@]}" -q "CREATE DATABASE IF NOT EXISTS $CLICKHOUSE_DB";
+        fi
+
+        for f in /docker-entrypoint-initdb.d/*; do
+            case "$f" in
+                *.sh)
+                    if [ -x "$f" ]; then
+                        echo "$0: running $f"
+                        "$f"
+                    else
+                        echo "$0: sourcing $f"
+                        # shellcheck source=/dev/null
+                        . "$f"
+                    fi
+                    ;;
+                *.sql) echo "$0: running $f"; "${clickhouseclient[@]}" < "$f" ; echo ;;
+                *.sql.gz) echo "$0: running $f"; gunzip -c "$f" | "${clickhouseclient[@]}"; echo ;;
+                *) echo "$0: ignoring $f" ;;
+            esac
+            echo
+        done
+
+        if ! kill -s TERM "$pid" || ! wait "$pid"; then
+            echo >&2 'Finishing of ClickHouse init process failed.'
+            exit 1
+        fi
+    fi
+else
+    echo "ClickHouse Database directory appears to contain a database; Skipping initialization"
+fi
+
+# if no args are passed to `docker run`, or the first argument starts with `--`, then the user is passing clickhouse-server arguments
+if [[ $# -lt 1 ]] || [[ "$1" == "--"* ]]; then
+    # Watchdog is launched by default, but does not send SIGINT to the main process,
+    # so the container can't be finished by ctrl+c
+    CLICKHOUSE_WATCHDOG_ENABLE=${CLICKHOUSE_WATCHDOG_ENABLE:-0}
+    export CLICKHOUSE_WATCHDOG_ENABLE
+
+    # An option for easy restarting and replacing clickhouse-server in a container, especially in Kubernetes.
+    # For example, you can replace the clickhouse-server binary with another one and restart it while keeping the container running.
+    if [[ "${CLICKHOUSE_DOCKER_RESTART_ON_EXIT:-0}" -eq "1" ]]; then
+        while true; do
+            # This runs the server as a child process of the shell script:
+            /usr/bin/clickhouse su "${USER}:${GROUP}" /usr/bin/clickhouse-server --config-file="$CLICKHOUSE_CONFIG" "$@" ||:
+            echo >&2 'ClickHouse Server exited, and the environment variable CLICKHOUSE_DOCKER_RESTART_ON_EXIT is set to 1. Restarting the server.'
+        done
+    else
+        # This replaces the shell script with the server:
+        exec /usr/bin/clickhouse su "${USER}:${GROUP}" /usr/bin/clickhouse-server --config-file="$CLICKHOUSE_CONFIG" "$@"
+    fi
+fi
+
+# Otherwise, we assume the user wants to run their own process, for example a `bash` shell to explore this image
+exec "$@"
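The init sequence in this vendored entrypoint starts the server bound to localhost, polls its `/ping` endpoint until it answers, runs the `/docker-entrypoint-initdb.d` scripts, then stops the temporary server. For illustration only, here is the same wait-until-ready logic as a small Python sketch — a hypothetical helper, not part of this PR, using only the standard library:

```python
import time
import urllib.request


def wait_for_clickhouse(url: str = "http://127.0.0.1:8123/ping", tries: int = 1000) -> None:
    """Poll ClickHouse's /ping endpoint until it responds, mirroring the wget loop above."""
    while tries > 0:
        try:
            with urllib.request.urlopen(url, timeout=1) as response:
                if response.status == 200:
                    return
        except OSError:
            pass  # server is not accepting connections yet
        tries -= 1
        time.sleep(1)
    raise RuntimeError("ClickHouse init process failed.")
```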
diff --git a/docker/clickhouse/user_defined_function.xml b/docker/clickhouse/user_defined_function.xml
new file mode 100644
index 0000000000000..9617d2495ced1
--- /dev/null
+++ b/docker/clickhouse/user_defined_function.xml
@@ -0,0 +1,287 @@
+<functions>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel</name>
+        <return_type>Array(Tuple(Int8, Nullable(String), Array(Float64)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Nullable(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_cohort</name>
+        <return_type>Array(Tuple(Int8, UInt64, Array(Float64)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(UInt64)</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), UInt64, Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_cohort.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_array</name>
+        <return_type>Array(Tuple(Int8, Array(String), Array(Float64)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Array(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_array.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_test</name>
+        <return_type>String</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_test.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_trends</name>
+        <return_type>Array(Tuple(DateTime, Int8, Nullable(String)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Nullable(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), Nullable(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_trends.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_array_trends</name>
+
+        <return_type>Array(Tuple(DateTime, Int8, Array(String)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_array_trends.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_cohort_trends</name>
+
+        <return_type>Array(Tuple(DateTime, Int8, UInt64))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(UInt64)</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), UInt64, Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_cohort_trends.py</command>
+    </function>
+
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_array_trends_test</name>
+        <return_type>String</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_array_trends_test.py</command>
+    </function>
+</functions>
\ No newline at end of file
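Each `<command>` above names an executable script that the compose changes mount into `/var/lib/clickhouse/user_scripts` — presumably why init-db.sh now installs python3. With `<format>JSONEachRow</format>`, ClickHouse writes one JSON object per input row to the script's stdin, keyed by the declared argument names, and expects one JSON object per row back on stdout, keyed by the `<return_name>`. A minimal hypothetical skeleton of such a script; the actual funnel aggregation lives in `posthog/user_scripts` and is elided here:

```python
#!/usr/bin/python3
import json
import sys

# Read one JSONEachRow-encoded row per line from stdin; keys match the
# <argument> names declared in user_defined_function.xml.
for line in sys.stdin:
    row = json.loads(line)
    num_steps = row["num_steps"]
    value = row["value"]  # list of per-event tuples to aggregate

    # ... the real funnel aggregation over `value` would happen here ...
    result = []  # placeholder for the aggregated funnel steps

    # Write one JSON object per input row; the key must match <return_name>.
    print(json.dumps({"result": result}))
    sys.stdout.flush()  # flush so ClickHouse isn't left waiting for output
```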
diff --git a/ee/billing/quota_limiting.py b/ee/billing/quota_limiting.py
index f91811d866a40..e5f3d641b5364 100644
--- a/ee/billing/quota_limiting.py
+++ b/ee/billing/quota_limiting.py
@@ -134,7 +134,7 @@ def org_quota_limited_until(
 
     if posthoganalytics.feature_enabled(
         QUOTA_LIMIT_DATA_RETENTION_FLAG,
-        organization.id,
+        str(organization.id),
         groups={"organization": str(organization.id)},
         group_properties={"organization": {"id": str(organization.id)}},
     ):
diff --git a/ee/billing/test/test_quota_limiting.py b/ee/billing/test/test_quota_limiting.py
index 926e3441c4f73..fedf7b15a54ec 100644
--- a/ee/billing/test/test_quota_limiting.py
+++ b/ee/billing/test/test_quota_limiting.py
@@ -69,9 +69,9 @@ def test_quota_limiting_feature_flag_enabled(self, patch_feature_enabled, patch_
         quota_limited_orgs, quota_limiting_suspended_orgs = update_all_org_billing_quotas()
         patch_feature_enabled.assert_called_with(
             QUOTA_LIMIT_DATA_RETENTION_FLAG,
-            self.organization.id,
+            str(self.organization.id),
             groups={"organization": org_id},
-            group_properties={"organization": {"id": org_id}},
+            group_properties={"organization": {"id": str(org_id)}},
         )
         patch_capture.assert_called_once_with(
             org_id,
@@ -101,7 +101,7 @@ def test_quota_limiting_feature_flag_enabled(self, patch_feature_enabled, patch_
         quota_limited_orgs, quota_limiting_suspended_orgs = update_all_org_billing_quotas()
         patch_feature_enabled.assert_called_with(
             QUOTA_LIMIT_DATA_RETENTION_FLAG,
-            self.organization.id,
+            str(self.organization.id),
             groups={"organization": org_id},
             group_properties={"organization": {"id": org_id}},
         )
diff --git a/ee/frontend/mobile-replay/__mocks__/encoded-snapshot-data.ts b/ee/frontend/mobile-replay/__mocks__/encoded-snapshot-data.ts
new file mode 100644
index 0000000000000..ceb176d49ce35
--- /dev/null
+++ b/ee/frontend/mobile-replay/__mocks__/encoded-snapshot-data.ts
@@ -0,0 +1,6 @@
+export const encodedWebSnapshotData: string[] = [
+    // first item could be a network event or something else
+
'{"windowId":"0191C63B-03FF-73B5-96BE-40BE2761621C","data":{"payload":{"requests":[{"duration":28,"entryType":"resource","initiatorType":"fetch","method":"GET","name":"https://1.bp.blogspot.com/-hkNkoCjc5UA/T4JTlCjhhfI/AAAAAAAAB98/XxQwZ-QPkI8/s1600/Free+Google+Wallpapers+3.jpg","responseStatus":200,"timestamp":1725369200216,"transferSize":82375}]},"plugin":"rrweb/network@1"},"timestamp":1725369200216,"type":6,"seen":8833798676917222}', + '{"windowId":"0191C63B-03FF-73B5-96BE-40BE2761621C","data":{"height":852,"width":393},"timestamp":1725607643113,"type":4,"seen":4930607506458337}', + '{"windowId":"0191C63B-03FF-73B5-96BE-40BE2761621C","data":{"initialOffset":{"left":0,"top":0},"wireframes":[{"base64":"data:image/jpeg;base64,/9j/4AAQSkZJR","height":852,"id":4324378400,"type":"screenshot","width":393,"x":0,"y":0}]},"timestamp":1725607643113,"type":2,"seen":2118469619185818}', +] diff --git a/ee/frontend/mobile-replay/__snapshots__/parsing.test.ts.snap b/ee/frontend/mobile-replay/__snapshots__/parsing.test.ts.snap new file mode 100644 index 0000000000000..c916dd21d54c5 --- /dev/null +++ b/ee/frontend/mobile-replay/__snapshots__/parsing.test.ts.snap @@ -0,0 +1,339 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`snapshot parsing handles mobile data with no meta event 1`] = ` +[ + { + "data": { + "payload": { + "requests": [ + { + "duration": 28, + "entryType": "resource", + "initiatorType": "fetch", + "method": "GET", + "name": "https://1.bp.blogspot.com/-hkNkoCjc5UA/T4JTlCjhhfI/AAAAAAAAB98/XxQwZ-QPkI8/s1600/Free+Google+Wallpapers+3.jpg", + "responseStatus": 200, + "timestamp": 1725369200216, + "transferSize": 82375, + }, + ], + }, + "plugin": "rrweb/network@1", + }, + "seen": 8833798676917222, + "timestamp": 1725369200216, + "type": 6, + "windowId": "0191C63B-03FF-73B5-96BE-40BE2761621C", + }, + { + "data": { + "height": 852, + "href": "", + "width": 393, + }, + "timestamp": 1725607643113, + "type": 4, + "windowId": "0191C63B-03FF-73B5-96BE-40BE2761621C", + }, + { + "data": { + "initialOffset": { + "left": 0, + "top": 0, + }, + "node": { + "childNodes": [ + { + "id": 2, + "name": "html", + "publicId": "", + "systemId": "", + "type": 1, + }, + { + "attributes": { + "data-rrweb-id": 3, + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": { + "data-rrweb-id": 4, + }, + "childNodes": [ + { + "attributes": { + "type": "text/css", + }, + "childNodes": [ + { + "id": 101, + "textContent": " + body { + margin: unset; + } + input, button, select, textarea { + font: inherit; + margin: 0; + padding: 0; + border: 0; + outline: 0; + background: transparent; + padding-block: 0 !important; + } + .input:focus { + outline: none; + } + img { + border-style: none; + } + ", + "type": 3, + }, + ], + "id": 100, + "tagName": "style", + "type": 2, + }, + ], + "id": 4, + "tagName": "head", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 5, + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": { + "data-rrweb-id": 4324378400, + "height": 852, + "src": "data:image/jpeg;base64,/9j/4AAQSkZJR", + "style": "width: 393px;height: 852px;position: fixed;left: 0px;top: 0px;", + "width": 393, + }, + "childNodes": [], + "id": 4324378400, + "tagName": "img", + "type": 2, + }, + { + "attributes": { + "data-render-reason": "a fixed placeholder to contain the keyboard in the correct stacking position", + "data-rrweb-id": 9, + }, + "childNodes": [], + "id": 9, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 7, + }, + 
"childNodes": [], + "id": 7, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 11, + }, + "childNodes": [], + "id": 11, + "tagName": "div", + "type": 2, + }, + ], + "id": 5, + "tagName": "body", + "type": 2, + }, + ], + "id": 3, + "tagName": "html", + "type": 2, + }, + ], + "id": 1, + "type": 0, + }, + }, + "timestamp": 1725607643113, + "type": 2, + "windowId": "0191C63B-03FF-73B5-96BE-40BE2761621C", + }, +] +`; + +exports[`snapshot parsing handles normal mobile data 1`] = ` +[ + { + "data": { + "payload": { + "requests": [ + { + "duration": 28, + "entryType": "resource", + "initiatorType": "fetch", + "method": "GET", + "name": "https://1.bp.blogspot.com/-hkNkoCjc5UA/T4JTlCjhhfI/AAAAAAAAB98/XxQwZ-QPkI8/s1600/Free+Google+Wallpapers+3.jpg", + "responseStatus": 200, + "timestamp": 1725369200216, + "transferSize": 82375, + }, + ], + }, + "plugin": "rrweb/network@1", + }, + "seen": 8833798676917222, + "timestamp": 1725369200216, + "type": 6, + "windowId": "0191C63B-03FF-73B5-96BE-40BE2761621C", + }, + { + "data": { + "height": 852, + "href": "", + "width": 393, + }, + "timestamp": 1725607643113, + "type": 4, + "windowId": "0191C63B-03FF-73B5-96BE-40BE2761621C", + }, + { + "data": { + "initialOffset": { + "left": 0, + "top": 0, + }, + "node": { + "childNodes": [ + { + "id": 2, + "name": "html", + "publicId": "", + "systemId": "", + "type": 1, + }, + { + "attributes": { + "data-rrweb-id": 3, + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": { + "data-rrweb-id": 4, + }, + "childNodes": [ + { + "attributes": { + "type": "text/css", + }, + "childNodes": [ + { + "id": 101, + "textContent": " + body { + margin: unset; + } + input, button, select, textarea { + font: inherit; + margin: 0; + padding: 0; + border: 0; + outline: 0; + background: transparent; + padding-block: 0 !important; + } + .input:focus { + outline: none; + } + img { + border-style: none; + } + ", + "type": 3, + }, + ], + "id": 100, + "tagName": "style", + "type": 2, + }, + ], + "id": 4, + "tagName": "head", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 5, + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": { + "data-rrweb-id": 4324378400, + "height": 852, + "src": "data:image/jpeg;base64,/9j/4AAQSkZJR", + "style": "width: 393px;height: 852px;position: fixed;left: 0px;top: 0px;", + "width": 393, + }, + "childNodes": [], + "id": 4324378400, + "tagName": "img", + "type": 2, + }, + { + "attributes": { + "data-render-reason": "a fixed placeholder to contain the keyboard in the correct stacking position", + "data-rrweb-id": 9, + }, + "childNodes": [], + "id": 9, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 7, + }, + "childNodes": [], + "id": 7, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 11, + }, + "childNodes": [], + "id": 11, + "tagName": "div", + "type": 2, + }, + ], + "id": 5, + "tagName": "body", + "type": 2, + }, + ], + "id": 3, + "tagName": "html", + "type": 2, + }, + ], + "id": 1, + "type": 0, + }, + }, + "timestamp": 1725607643113, + "type": 2, + "windowId": "0191C63B-03FF-73B5-96BE-40BE2761621C", + }, +] +`; diff --git a/ee/frontend/mobile-replay/parsing.test.ts b/ee/frontend/mobile-replay/parsing.test.ts new file mode 100644 index 0000000000000..5d913b4117833 --- /dev/null +++ b/ee/frontend/mobile-replay/parsing.test.ts @@ -0,0 +1,20 @@ +import { parseEncodedSnapshots } from 'scenes/session-recordings/player/sessionRecordingDataLogic' + +import { 
encodedWebSnapshotData } from './__mocks__/encoded-snapshot-data' + +describe('snapshot parsing', () => { + const sessionId = '12345' + const numberOfParsedLinesInData = 3 + + it('handles normal mobile data', async () => { + const parsed = await parseEncodedSnapshots(encodedWebSnapshotData, sessionId, true) + expect(parsed.length).toEqual(numberOfParsedLinesInData) + expect(parsed).toMatchSnapshot() + }) + it('handles mobile data with no meta event', async () => { + const withoutMeta = [encodedWebSnapshotData[0], encodedWebSnapshotData[2]] + const parsed = await parseEncodedSnapshots(withoutMeta, sessionId, true) + expect(parsed.length).toEqual(numberOfParsedLinesInData) + expect(parsed).toMatchSnapshot() + }) +}) diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py index 28d3353c0576f..8947e1c270ee4 100644 --- a/ee/session_recordings/session_recording_playlist.py +++ b/ee/session_recordings/session_recording_playlist.py @@ -258,7 +258,7 @@ def modify_recordings( return response.Response({"success": True}) if request.method == "DELETE": - playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id) # type: ignore + playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id) if playlist_item: playlist_item.delete() diff --git a/frontend/__snapshots__/components-activitylog--data-management-activity--dark.png b/frontend/__snapshots__/components-activitylog--data-management-activity--dark.png new file mode 100644 index 0000000000000..47bac4cfceaae Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--data-management-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--data-management-activity--light.png b/frontend/__snapshots__/components-activitylog--data-management-activity--light.png new file mode 100644 index 0000000000000..b10de740dcbe2 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--data-management-activity--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--feature-flag-activity--dark.png b/frontend/__snapshots__/components-activitylog--feature-flag-activity--dark.png new file mode 100644 index 0000000000000..4e1c10ae84224 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--feature-flag-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--feature-flag-activity--light.png b/frontend/__snapshots__/components-activitylog--feature-flag-activity--light.png new file mode 100644 index 0000000000000..92e650848ddf5 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--feature-flag-activity--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png new file mode 100644 index 0000000000000..02301a7432364 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--light.png b/frontend/__snapshots__/components-activitylog--insight-activity--light.png new file mode 100644 index 0000000000000..38e6385740f05 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--insight-activity--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--persons-activity--dark.png 
b/frontend/__snapshots__/components-activitylog--persons-activity--dark.png new file mode 100644 index 0000000000000..1d4591faf5288 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--persons-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--persons-activity--light.png b/frontend/__snapshots__/components-activitylog--persons-activity--light.png new file mode 100644 index 0000000000000..9d99a379eb717 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--persons-activity--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--team-activity--dark.png b/frontend/__snapshots__/components-activitylog--team-activity--dark.png new file mode 100644 index 0000000000000..df50289ef226c Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--team-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--team-activity--light.png b/frontend/__snapshots__/components-activitylog--team-activity--light.png new file mode 100644 index 0000000000000..cac2ad1111338 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--team-activity--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--with-caption--dark.png b/frontend/__snapshots__/components-activitylog--with-caption--dark.png new file mode 100644 index 0000000000000..2ed64b93d727e Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--with-caption--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--with-caption--light.png b/frontend/__snapshots__/components-activitylog--with-caption--light.png new file mode 100644 index 0000000000000..e29d7c742cb87 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--with-caption--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--with-no-data--dark.png b/frontend/__snapshots__/components-activitylog--with-no-data--dark.png new file mode 100644 index 0000000000000..a624d71b8a149 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--with-no-data--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--with-no-data--light.png b/frontend/__snapshots__/components-activitylog--with-no-data--light.png new file mode 100644 index 0000000000000..f26dab1a75e0a Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--with-no-data--light.png differ diff --git a/frontend/__snapshots__/components-activitylog--without-audit-logs-feaure--dark.png b/frontend/__snapshots__/components-activitylog--without-audit-logs-feaure--dark.png new file mode 100644 index 0000000000000..b53b2343245c5 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--without-audit-logs-feaure--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--without-audit-logs-feaure--light.png b/frontend/__snapshots__/components-activitylog--without-audit-logs-feaure--light.png new file mode 100644 index 0000000000000..1001ff69a0133 Binary files /dev/null and b/frontend/__snapshots__/components-activitylog--without-audit-logs-feaure--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png index 648af90308438..995d0b635e41f 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png and 
b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png index 4037a2ef634ce..9906158e3ee89 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png index cab4754531e0a..63cd76988cf89 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png index 82bcb0f5bb0ce..4fa2848023908 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png index 09792a848277f..a81c99d60c142 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png index 1b8480c6221a3..3f48b36bd39f5 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png index 13c26071b3bcc..9d83ae1c67360 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png index 5a0cbd2405b75..29233389ab5da 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png index 4c2decb1bc733..dd8c6705a4617 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png index 7939c5463f8ee..61513fef585d0 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png index 8b41666303c28..79db1daa2edf6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png index 168f812ecd62d..b0c4487104da4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png index ec10f0387e504..eb0e2b6898835 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png index 086476ba716c0..0ddf6883a14b9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light--webkit.png index fd76db27712b0..2083ec1e7d2f6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png index 1072316bf304a..6271cc120bbfd 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png index ca1cea1ec5a32..de05692e7bfb6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--dark.png index 30382a3ae33cd..b18a4b155ef7a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--light.png index 6a3903bcfb483..fbfb753facedc 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--funnel-single-step--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png index a398da15097b4..a8ae98e7ffb57 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png index 1c4c663fb6eda..d564f7b515fa2 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png index 6c94881876e98..61e9a8fef7b8a 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png index 87826b03eefc9..8b0dea1d4d341 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png index 478291088babd..56d54473ad193 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png index 1265e8ee41df6..8127d37fc51e9 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png index e7c27e7ec2b9b..cd18b2d8fd720 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png index 72d89788b7ef8..d110b6393cf03 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png 
b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png index e2a4ba2627f60..46075001d93ea 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png index 50a1f23e6f292..46f92ca1bf6ed 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png index 17d35a405f035..0318077409a06 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png index 82a2b54538d15..409459f0a3f83 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png index 95204eb281d62..3aa44a392089a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png index 3c87296382dea..5eca09f381ea2 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png new file mode 100644 index 0000000000000..4ba638cb67732 Binary files /dev/null and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png new file mode 100644 index 0000000000000..462ffe9656c70 Binary files /dev/null and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png differ diff --git a/frontend/public/hedgehog/sprites/heatmaps.png b/frontend/public/hedgehog/sprites/heatmaps.png deleted file mode 100644 index e03c51fd2d113..0000000000000 Binary files a/frontend/public/hedgehog/sprites/heatmaps.png and /dev/null differ diff --git a/frontend/public/hedgehog/sprites/overlays/fire.png b/frontend/public/hedgehog/sprites/overlays/fire.png new file mode 100644 index 0000000000000..e0a6139aedcb3 Binary files /dev/null and b/frontend/public/hedgehog/sprites/overlays/fire.png differ diff --git 
a/frontend/public/hedgehog/sprites/action.png b/frontend/public/hedgehog/sprites/skins/default/action.png similarity index 100% rename from frontend/public/hedgehog/sprites/action.png rename to frontend/public/hedgehog/sprites/skins/default/action.png diff --git a/frontend/public/hedgehog/sprites/fall.png b/frontend/public/hedgehog/sprites/skins/default/fall.png similarity index 100% rename from frontend/public/hedgehog/sprites/fall.png rename to frontend/public/hedgehog/sprites/skins/default/fall.png diff --git a/frontend/public/hedgehog/sprites/flag.png b/frontend/public/hedgehog/sprites/skins/default/flag.png similarity index 100% rename from frontend/public/hedgehog/sprites/flag.png rename to frontend/public/hedgehog/sprites/skins/default/flag.png diff --git a/frontend/public/hedgehog/sprites/inspect.png b/frontend/public/hedgehog/sprites/skins/default/inspect.png similarity index 100% rename from frontend/public/hedgehog/sprites/inspect.png rename to frontend/public/hedgehog/sprites/skins/default/inspect.png diff --git a/frontend/public/hedgehog/sprites/jump.png b/frontend/public/hedgehog/sprites/skins/default/jump.png similarity index 100% rename from frontend/public/hedgehog/sprites/jump.png rename to frontend/public/hedgehog/sprites/skins/default/jump.png diff --git a/frontend/public/hedgehog/sprites/phone.png b/frontend/public/hedgehog/sprites/skins/default/phone.png similarity index 100% rename from frontend/public/hedgehog/sprites/phone.png rename to frontend/public/hedgehog/sprites/skins/default/phone.png diff --git a/frontend/public/hedgehog/sprites/sign.png b/frontend/public/hedgehog/sprites/skins/default/sign.png similarity index 100% rename from frontend/public/hedgehog/sprites/sign.png rename to frontend/public/hedgehog/sprites/skins/default/sign.png diff --git a/frontend/public/hedgehog/sprites/walk.png b/frontend/public/hedgehog/sprites/skins/default/walk.png similarity index 100% rename from frontend/public/hedgehog/sprites/walk.png rename to frontend/public/hedgehog/sprites/skins/default/walk.png diff --git a/frontend/public/hedgehog/sprites/wave.png b/frontend/public/hedgehog/sprites/skins/default/wave.png similarity index 100% rename from frontend/public/hedgehog/sprites/wave.png rename to frontend/public/hedgehog/sprites/skins/default/wave.png diff --git a/frontend/public/hedgehog/sprites/skins/spiderhog/fall.png b/frontend/public/hedgehog/sprites/skins/spiderhog/fall.png new file mode 100644 index 0000000000000..575a45dd4a47f Binary files /dev/null and b/frontend/public/hedgehog/sprites/skins/spiderhog/fall.png differ diff --git a/frontend/public/hedgehog/sprites/skins/spiderhog/jump.png b/frontend/public/hedgehog/sprites/skins/spiderhog/jump.png new file mode 100644 index 0000000000000..410a4eec6ae88 Binary files /dev/null and b/frontend/public/hedgehog/sprites/skins/spiderhog/jump.png differ diff --git a/frontend/public/hedgehog/sprites/skins/spiderhog/walk.png b/frontend/public/hedgehog/sprites/skins/spiderhog/walk.png new file mode 100644 index 0000000000000..f456d94a48b94 Binary files /dev/null and b/frontend/public/hedgehog/sprites/skins/spiderhog/walk.png differ diff --git a/frontend/public/hedgehog/sprites/skins/spiderhog/wave.png b/frontend/public/hedgehog/sprites/skins/spiderhog/wave.png new file mode 100644 index 0000000000000..116ce531cbd9c Binary files /dev/null and b/frontend/public/hedgehog/sprites/skins/spiderhog/wave.png differ diff --git a/frontend/public/hedgehog/sprites/spin.png b/frontend/public/hedgehog/sprites/spin.png deleted file mode 
100644 index 85ef2ef67ab59..0000000000000 Binary files a/frontend/public/hedgehog/sprites/spin.png and /dev/null differ diff --git a/frontend/public/services/avo.png b/frontend/public/services/avo.png new file mode 100644 index 0000000000000..521158b35aa68 Binary files /dev/null and b/frontend/public/services/avo.png differ diff --git a/frontend/public/services/loops.png b/frontend/public/services/loops.png new file mode 100644 index 0000000000000..822763a10e108 Binary files /dev/null and b/frontend/public/services/loops.png differ diff --git a/frontend/public/services/rudderstack.png b/frontend/public/services/rudderstack.png new file mode 100644 index 0000000000000..11ebb99b677b5 Binary files /dev/null and b/frontend/public/services/rudderstack.png differ diff --git a/frontend/public/services/vitally.png b/frontend/public/services/vitally.png new file mode 100644 index 0000000000000..867ed5e10e908 Binary files /dev/null and b/frontend/public/services/vitally.png differ diff --git a/frontend/src/layout/navigation-3000/Navigation.scss b/frontend/src/layout/navigation-3000/Navigation.scss index e1408ece058d5..f1dafe9a8df79 100644 --- a/frontend/src/layout/navigation-3000/Navigation.scss +++ b/frontend/src/layout/navigation-3000/Navigation.scss @@ -52,6 +52,14 @@ display: flex; flex-direction: column; } + + &.Navigation3000__scene--canvas { + --scene-padding: 0px; + + display: flex; + flex-direction: column; + overflow: hidden; + } } // Navbar diff --git a/frontend/src/layout/navigation-3000/Navigation.tsx b/frontend/src/layout/navigation-3000/Navigation.tsx index 6e1b8b9df3491..a53769d74e67e 100644 --- a/frontend/src/layout/navigation-3000/Navigation.tsx +++ b/frontend/src/layout/navigation-3000/Navigation.tsx @@ -53,7 +53,8 @@ export function Navigation({ className={clsx( 'Navigation3000__scene', // Hack - once we only have 3000 the "minimal" scenes should become "app-raw" - sceneConfig?.layout === 'app-raw' && 'Navigation3000__scene--raw' + sceneConfig?.layout === 'app-raw' && 'Navigation3000__scene--raw', + sceneConfig?.layout === 'app-canvas' && 'Navigation3000__scene--canvas' )} > {!sceneConfig?.hideBillingNotice && } diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx index b21c10bede17a..fb25445924814 100644 --- a/frontend/src/layout/navigation-3000/navigationLogic.tsx +++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx @@ -510,6 +510,14 @@ export const navigation3000Logic = kea([ icon: , to: isUsingSidebar ? undefined : urls.dataWarehouse(), }, + featureFlags[FEATURE_FLAGS.DATA_MODELING] && hasOnboardedAnyProduct + ? { + identifier: Scene.DataModel, + label: 'Data model', + icon: , + to: isUsingSidebar ? undefined : urls.dataModel(), + } + : null, hasOnboardedAnyProduct ? 
{ identifier: Scene.Pipeline, diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index e2406d9b64d22..199696ab127c8 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -1744,7 +1744,7 @@ const api = { errorTracking: { async update( fingerprint: ErrorTrackingGroup['fingerprint'], - data: Partial> + data: Partial> ): Promise { return await new ApiRequest().errorTrackingGroup(fingerprint).update({ data }) }, @@ -2075,6 +2075,18 @@ ): Promise { return await new ApiRequest().dataWarehouseSavedQuery(viewId).update({ data }) }, + async ancestors(viewId: DataWarehouseSavedQuery['id'], level?: number): Promise> { + return await new ApiRequest() + .dataWarehouseSavedQuery(viewId) + .withAction('ancestors') + .create({ data: { level } }) + }, + async descendants(viewId: DataWarehouseSavedQuery['id'], level?: number): Promise> { + return await new ApiRequest() + .dataWarehouseSavedQuery(viewId) + .withAction('descendants') + .create({ data: { level } }) + }, }, externalDataSources: { async list(options?: ApiMethodOptions | undefined): Promise> { diff --git a/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx b/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx index 72a1551eb43be..1a549691f9088 100644 --- a/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx +++ b/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx @@ -7,20 +7,32 @@ import { } from 'lib/components/ActivityLog/__mocks__/activityLogMocks' import { ActivityLog } from 'lib/components/ActivityLog/ActivityLog' -import { mswDecorator } from '~/mocks/browser' +import { mswDecorator, useStorybookMocks } from '~/mocks/browser' +import organizationCurrent from '~/mocks/fixtures/api/organizations/@current/@current.json' import { ActivityScope } from '~/types' const meta: Meta = { title: 'Components/ActivityLog', component: ActivityLog, - tags: ['test-skip'], // FIXME: Currently disabled as the Timeout story is flaky decorators: [ mswDecorator({ get: { - '/api/projects/:team/feature_flags/5/activity': (_, __, ctx) => [ - ctx.delay(86400000), - ctx.status(200), - ctx.json({ results: [] }), + // TODO: setting available features should be a decorator to make this easy + '/api/users/@me': () => [ + 200, + { + email: 'test@posthog.com', + first_name: 'Test Hedgehog', + organization: { + ...organizationCurrent, + available_product_features: [ + { + key: 'audit_logs', + name: 'Audit logs', + }, + ], + }, + }, ], '/api/projects/:team/feature_flags/6/activity': (_, __, ctx) => [ ctx.status(200), @@ -88,6 +100,21 @@ export function WithNoData(): JSX.Element { return } -export function Timeout(): JSX.Element { - return +export function WithoutAuditLogsFeature(): JSX.Element { + useStorybookMocks({ + get: { + '/api/users/@me': () => [ + 200, + { + email: 'test@posthog.com', + first_name: 'Test Hedgehog', + organization: { + ...organizationCurrent, + available_product_features: [], + }, + }, + ], + }, + }) + return } diff --git a/frontend/src/lib/components/Cards/CardMeta.tsx b/frontend/src/lib/components/Cards/CardMeta.tsx index fb335097463bb..b8a6656e09e5d 100644 --- a/frontend/src/lib/components/Cards/CardMeta.tsx +++ b/frontend/src/lib/components/Cards/CardMeta.tsx @@ -1,10 +1,13 @@ import './CardMeta.scss' +import { IconPieChart } from '@posthog/icons' import clsx from 'clsx' import { useResizeObserver } from 'lib/hooks/useResizeObserver' import { IconRefresh, IconSubtitles, IconSubtitlesOff } from 'lib/lemon-ui/icons' import { LemonButton } from
'lib/lemon-ui/LemonButton' import { More } from 'lib/lemon-ui/LemonButton/More' +import { Tooltip } from 'lib/lemon-ui/Tooltip' +import React from 'react' import { Transition } from 'react-transition-group' import { InsightColor } from '~/types' @@ -28,7 +31,7 @@ export interface CardMetaProps extends Pick metaDetails?: JSX.Element | null moreButtons?: JSX.Element | null topHeading?: JSX.Element | null - samplingNotice?: JSX.Element | null + samplingFactor?: number | null } export function CardMeta({ @@ -44,7 +47,7 @@ export function CardMeta({ areDetailsShown, setAreDetailsShown, className, - samplingNotice, + samplingFactor, }: CardMetaProps): JSX.Element { const { ref: primaryRef, width: primaryWidth } = useResizeObserver() const { ref: detailsRef, height: detailsHeight } = useResizeObserver() @@ -68,7 +71,20 @@ export function CardMeta({ )}
-
{topHeading}
+
+ {topHeading} + {samplingFactor && samplingFactor < 1 && ( + + + + )} +
{showDetailsControls && setAreDetailsShown && ( )} - {samplingNotice ? samplingNotice : null} {showEditingControls && }
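The CardMeta hunk above replaces the pre-rendered `samplingNotice` element with a plain `samplingFactor` number, so the card itself decides when to render the sampling indicator (whenever the factor is below 1). A minimal standalone sketch of that guard, assuming only the imports shown in the hunk; the tooltip wording is a placeholder, not the PR's exact copy:

```tsx
import { IconPieChart } from '@posthog/icons'
import { Tooltip } from 'lib/lemon-ui/Tooltip'

// Standalone version of the check CardMeta now performs internally:
// show the pie-chart indicator only when results are actually sampled.
export function SamplingIndicator({ samplingFactor }: { samplingFactor?: number | null }): JSX.Element | null {
    if (!samplingFactor || samplingFactor >= 1) {
        return null // unsampled insights get no indicator
    }
    return (
        // Placeholder tooltip copy: the exact wording is not part of this hunk
        <Tooltip title={`Results calculated from ${samplingFactor * 100}% of users`}>
            <IconPieChart />
        </Tooltip>
    )
}
```

Passing the number instead of JSX keeps the presentation decision in one place; the InsightMeta hunk below deletes its own Tooltip wrapper accordingly.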
diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx index 14ae94b88f520..d18c91ca2e111 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx @@ -1,5 +1,3 @@ -// eslint-disable-next-line no-restricted-imports -import { PieChartFilled } from '@ant-design/icons' import { useValues } from 'kea' import { CardMeta } from 'lib/components/Cards/CardMeta' import { TopHeading } from 'lib/components/Cards/InsightCard/TopHeading' @@ -130,13 +128,7 @@ export function InsightMeta({ } metaDetails={} - samplingNotice={ - samplingFactor && samplingFactor < 1 ? ( - - - - ) : null - } + samplingFactor={samplingFactor} moreButtons={ <> <> diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.scss b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.scss index 29a642d2f7c8a..46c8a466a29c6 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.scss +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.scss @@ -1,5 +1,4 @@ .HedgehogBuddy { - position: fixed; z-index: var(--z-hedgehog-buddy); margin: 0; cursor: pointer; diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx index 33c2f5b9c51b0..0b8ff1faedf49 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx @@ -1,6 +1,6 @@ import './HedgehogBuddy.scss' -import { ProfilePicture } from '@posthog/lemon-ui' +import { lemonToast, ProfilePicture } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { router } from 'kea-router' @@ -18,13 +18,18 @@ import { COLOR_TO_FILTER_MAP, hedgehogBuddyLogic } from './hedgehogBuddyLogic' import { HedgehogOptions } from './HedgehogOptions' import { AccessoryInfo, - baseSpriteAccessoriesPath, - baseSpritePath, + AnimationName, + OverlayAnimationName, + overlayAnimations, SHADOW_HEIGHT, + skins, SPRITE_SHEET_WIDTH, SPRITE_SIZE, + spriteAccessoryUrl, + SpriteInfo, + spriteOverlayUrl, + spriteUrl, standardAccessories, - standardAnimations, } from './sprites/sprites' export const X_FRAMES = SPRITE_SHEET_WIDTH / SPRITE_SIZE @@ -32,17 +37,14 @@ export const FPS = 24 const GRAVITY_PIXELS = 10 const MAX_JUMP_COUNT = 2 -const randomChoiceList: string[] = Object.keys(standardAnimations).reduce((acc: string[], key: string) => { - return [...acc, ...range(standardAnimations[key].randomChance || 0).map(() => key)] -}, []) - export type HedgehogBuddyProps = { onActorLoaded?: (actor: HedgehogActor) => void - onClose?: () => void - onClick?: () => void + onClose?: (actor: HedgehogActor) => void + onClick?: (actor: HedgehogActor) => void onPositionChange?: (actor: HedgehogActor) => void hedgehogConfig?: HedgehogConfig tooltip?: JSX.Element + static?: boolean } type Box = { @@ -73,46 +75,62 @@ const elementToBox = (element: Element): Box => { } } +type AnimationState = { + name: string + frame: number + iterations: number | null + spriteInfo: SpriteInfo + onComplete?: () => boolean | void +} + export class HedgehogActor { element?: HTMLDivElement | null - animations = standardAnimations direction: 'left' | 'right' = 'right' - startX = Math.min(Math.max(0, Math.floor(Math.random() * window.innerWidth)), window.innerWidth - SPRITE_SIZE) - startY = Math.min(Math.max(0, Math.floor(Math.random() * window.innerHeight)), 
window.innerHeight - SPRITE_SIZE) + startX = 0 + startY = 0 x = this.startX y = this.startY + followMouse = false + lastKnownMousePosition: [number, number] | null = null isDragging = false isControlledByUser = false yVelocity = -30 // Appears as if jumping out of thin air xVelocity = 0 ground: Element | null = null jumpCount = 0 - animationName: string = 'fall' - animation = this.animations[this.animationName] - animationFrame = 0 - animationIterations: number | null = null - animationCompletionHandler?: () => boolean | void + mainAnimation: AnimationState | null = null + overlayAnimation: AnimationState | null = null + gravity = GRAVITY_PIXELS ignoreGroundAboveY?: number showTooltip = false lastScreenPosition = [window.screenX, window.screenY + window.innerHeight] + static = false // properties synced with the logic hedgehogConfig: Partial = {} tooltip?: JSX.Element constructor() { + this.log('Created new HedgehogActor') + + this.startX = Math.min( + Math.max(0, Math.floor(Math.random() * window.innerWidth)), + window.innerWidth - SPRITE_SIZE + ) + this.startY = Math.min( + Math.max(0, Math.floor(Math.random() * window.innerHeight)), + window.innerHeight - SPRITE_SIZE + ) this.setAnimation('fall') } - private accessories(): AccessoryInfo[] { - return this.hedgehogConfig.accessories?.map((acc) => standardAccessories[acc]) ?? [] + animations(): { [key: string]: SpriteInfo } { + const animations = skins[this.hedgehogConfig.skin || 'default'] + return animations } - private getAnimationOptions(): string[] { - if (!this.hedgehogConfig.walking_enabled) { - return randomChoiceList.filter((x) => x !== 'walk') - } - return randomChoiceList + private accessories(): AccessoryInfo[] { + return this.hedgehogConfig.accessories?.map((acc) => standardAccessories[acc]) ?? [] } private log(message: string, ...args: any[]): void { @@ -124,21 +142,68 @@ export class HedgehogActor { setOnFire(times = 3): void { this.log('setting on fire, iterations remaining:', times) - this.setAnimation('heatmaps', { + this.setOverlayAnimation('fire', { onComplete: () => { if (times == 1) { - return + this.setOverlayAnimation(null) + } else { + this.setOnFire(times - 1) } - this.setOnFire(times - 1) - return true }, }) + + this.setAnimation('stop', {}) this.direction = sampleOne(['left', 'right']) this.xVelocity = this.direction === 'left' ? -5 : 5 this.jump() } setupKeyboardListeners(): () => void { + const lastKeys: string[] = [] + + const secretMap: { + keys: string[] + action: () => void + }[] = [ + { + keys: ['f', 'f', 'f'], + action: () => this.setOnFire(), + }, + { + keys: ['f', 'i', 'r', 'e'], + action: () => this.setOnFire(), + }, + { + keys: ['s', 'p', 'i', 'd', 'e', 'r', 'h', 'o', 'g'], + action: () => { + this.hedgehogConfig.skin = 'spiderhog' + }, + }, + { + keys: [ + 'arrowup', + 'arrowup', + 'arrowdown', + 'arrowdown', + 'arrowleft', + 'arrowright', + 'arrowleft', + 'arrowright', + 'b', + 'a', + ], + action: () => { + this.setOnFire() + this.gravity = -2 + + lemonToast.info('I must leave. 
My people need me!') + setTimeout(() => { + this.gravity = GRAVITY_PIXELS + }, 2000) + }, + }, + ] + const keyDownListener = (e: KeyboardEvent): void => { if (shouldIgnoreInput(e) || !this.hedgehogConfig.controls_enabled) { return @@ -146,13 +211,25 @@ export class HedgehogActor { const key = e.key.toLowerCase() + lastKeys.push(key) + if (lastKeys.length > 20) { + lastKeys.shift() + } + if ([' ', 'w', 'arrowup'].includes(key)) { this.jump() } + secretMap.forEach((secret) => { + if (lastKeys.slice(-secret.keys.length).join('') === secret.keys.join('')) { + secret.action() + lastKeys.splice(-secret.keys.length) + } + }) + if (['arrowdown', 's'].includes(key)) { if (this.ground === document.body) { - if (this.animationName !== 'wave') { + if (this.mainAnimation?.name !== 'wave') { this.setAnimation('wave') } } else if (this.ground) { @@ -165,7 +242,7 @@ export class HedgehogActor { if (['arrowleft', 'a', 'arrowright', 'd'].includes(key)) { this.isControlledByUser = true - if (this.animationName !== 'walk') { + if (this.mainAnimation?.name !== 'walk') { this.setAnimation('walk') } @@ -178,8 +255,6 @@ export class HedgehogActor { // Moonwalking is hard so he moves slightly slower of course this.xVelocity *= 0.8 } - - this.animationIterations = null } } @@ -191,14 +266,51 @@ export class HedgehogActor { const key = e.key.toLowerCase() if (['arrowleft', 'a', 'arrowright', 'd'].includes(key)) { - this.setAnimation('stop') - this.animationIterations = FPS * 2 // Wait 2 seconds before doing something else + this.setAnimation('stop', { + iterations: FPS * 2, + }) this.isControlledByUser = false } } + const onMouseDown = (e: MouseEvent): void => { + if (!this.hedgehogConfig.controls_enabled || this.hedgehogConfig.skin !== 'spiderhog') { + return + } + + // Whilst the mouse is down we will move the hedgehog towards it + // First check that we haven't clicked the hedgehog + const elementBounds = this.element?.getBoundingClientRect() + if ( + elementBounds && + e.clientX >= elementBounds.left && + e.clientX <= elementBounds.right && + e.clientY >= elementBounds.top && + e.clientY <= elementBounds.bottom + ) { + return + } + + this.setAnimation('fall') + this.followMouse = true + this.lastKnownMousePosition = [e.clientX, e.clientY] + + const onMouseMove = (e: MouseEvent): void => { + this.lastKnownMousePosition = [e.clientX, e.clientY] + } + + const onMouseUp = (): void => { + this.followMouse = false + window.removeEventListener('mousemove', onMouseMove) + } + + window.addEventListener('mousemove', onMouseMove) + window.addEventListener('mouseup', onMouseUp) + } + window.addEventListener('keydown', keyDownListener) window.addEventListener('keyup', keyUpListener) + window.addEventListener('mousedown', onMouseDown) return () => { window.removeEventListener('keydown', keyDownListener) @@ -206,28 +318,27 @@ export class HedgehogActor { } } - setAnimation( - animationName: string, - options?: { - onComplete: () => boolean | void - } - ): void { - this.animationName = animationName - this.animation = this.animations[animationName] - this.animationFrame = 0 - this.animationCompletionHandler = () => { - this.animationCompletionHandler = undefined - - return options?.onComplete() - } - if (this.animationName !== 'stop') { - this.direction = this.animation.forceDirection || sampleOne(['left', 'right']) + setAnimation(animationName: AnimationName, options?: Partial): void { + const availableAnimations = this.animations() + animationName = availableAnimations[animationName] ? 
animationName : 'stop' + const spriteInfo = availableAnimations[animationName] + + this.mainAnimation = { + name: animationName, + frame: 0, + iterations: spriteInfo.maxIteration ?? null, + spriteInfo, + onComplete: options?.onComplete, } // Set a random number of iterations or infinite for certain situations - this.animationIterations = this.animation.maxIteration - ? Math.max(1, Math.floor(Math.random() * this.animation.maxIteration)) - : null + this.mainAnimation.iterations = + options?.iterations ?? + (spriteInfo.maxIteration ? Math.max(1, Math.floor(Math.random() * spriteInfo.maxIteration)) : null) + + if (this.mainAnimation.name !== 'stop') { + this.direction = this.mainAnimation.spriteInfo.forceDirection || sampleOne(['left', 'right']) + } if (animationName === 'walk') { this.xVelocity = this.direction === 'left' ? -1 : 1 @@ -236,19 +347,52 @@ export class HedgehogActor { } if ((window as any)._posthogDebugHedgehog) { - const duration = this.animationIterations - ? this.animationIterations * this.animation.frames * (1000 / FPS) - : '∞' + const duration = + this.mainAnimation.iterations !== null + ? this.mainAnimation.iterations * spriteInfo.frames * (1000 / FPS) + : '∞' - this.log(`Will '${this.animationName}' for ${duration}ms`) + this.log(`Will '${this.mainAnimation.name}' for ${duration}ms`) + } + } + + setOverlayAnimation( + animationName: OverlayAnimationName | null, + options?: { + onComplete: () => boolean | void + } + ): void { + if (!animationName) { + this.overlayAnimation = null + return + } + const spriteInfo = overlayAnimations[animationName] + if (!spriteInfo) { + this.log(`Overlay animation '${animationName}' not found`) + return + } + + this.overlayAnimation = { + name: animationName, + frame: 0, + iterations: 1, + spriteInfo, + onComplete: options?.onComplete ?? (() => this.setOverlayAnimation(null)), } } setRandomAnimation(): void { - if (this.animationName !== 'stop') { + if (this.mainAnimation?.name !== 'stop') { this.setAnimation('stop') } else { - this.setAnimation(sampleOne(this.getAnimationOptions())) + let randomChoiceList = Object.keys(this.animations()).reduce((acc, key) => { + return [...acc, ...range(this.animations()[key].randomChance || 0).map(() => key)] as AnimationName[] + }, [] as AnimationName[]) + + randomChoiceList = this.hedgehogConfig.walking_enabled + ? 
randomChoiceList + : randomChoiceList.filter((x) => x !== 'walk') + this.setAnimation(sampleOne(randomChoiceList)) } } @@ -258,7 +402,7 @@ export class HedgehogActor { } this.ground = null this.jumpCount += 1 - this.yVelocity = GRAVITY_PIXELS * 5 + this.yVelocity = this.gravity * 5 } update(): void { @@ -283,11 +427,11 @@ export class HedgehogActor { if (screenMoveY < 0) { // If the ground has moved up relative to the hedgehog we need to make him jump - this.yVelocity = Math.max(this.yVelocity + screenMoveY * 10, -GRAVITY_PIXELS * 20) + this.yVelocity = Math.max(this.yVelocity + screenMoveY * 10, -this.gravity * 20) } if (screenMoveX !== 0) { - if (this.animationName !== 'stop') { + if (this.mainAnimation?.name !== 'stop') { this.setAnimation('stop') } // Somewhat random numbers here to find what felt fun @@ -297,30 +441,53 @@ export class HedgehogActor { this.applyVelocity() - // Ensure we are falling or not - if (this.animationName === 'fall' && !this.isFalling()) { - this.setAnimation('stop') - } + if (this.mainAnimation) { + // Ensure we are falling or not + if (this.mainAnimation.name === 'fall' && !this.isFalling()) { + this.setAnimation('stop') + } - this.animationFrame++ + this.mainAnimation.frame++ - if (this.animationFrame >= this.animation.frames) { - // End of the animation - if (this.animationIterations !== null) { - this.animationIterations -= 1 - } + if (this.mainAnimation.frame >= this.mainAnimation.spriteInfo.frames) { + this.mainAnimation.frame = 0 + // End of the animation + if (this.mainAnimation.iterations !== null) { + this.mainAnimation.iterations -= 1 + } + + if (this.mainAnimation.iterations === 0) { + this.mainAnimation.iterations = null + // End of the animation, set the next one - if (this.animationIterations === 0) { - this.animationIterations = null - // End of the animation, set the next one + const preventNextAnimation = this.mainAnimation.onComplete?.() - const preventNextAnimation = this.animationCompletionHandler?.() - if (!preventNextAnimation) { - this.setRandomAnimation() + if (!preventNextAnimation) { + if (this.static) { + this.setAnimation('stop') + } else { + this.setRandomAnimation() + } + } } } + } - this.animationFrame = 0 + if (this.overlayAnimation) { + this.overlayAnimation.frame++ + + if (this.overlayAnimation.frame >= this.overlayAnimation.spriteInfo.frames) { + this.overlayAnimation.frame = 0 + // End of the animation + if (this.overlayAnimation.iterations !== null) { + this.overlayAnimation.iterations -= 1 + } + + if (this.overlayAnimation.iterations === 0) { + this.overlayAnimation.iterations = null + this.overlayAnimation.onComplete?.() + } + } } if (this.isDragging) { @@ -352,11 +519,39 @@ export class HedgehogActor { return } + if (this.followMouse) { + this.ground = null + const [clientX, clientY] = this.lastKnownMousePosition ?? [0, 0] + + const xDiff = clientX - this.x + const yDiff = window.innerHeight - clientY - this.y + + const distance = Math.sqrt(xDiff ** 2 + yDiff ** 2) + const speed = 3 + const ratio = speed / distance + + if (yDiff < 0) { + this.yVelocity -= this.gravity + } + + this.yVelocity += yDiff * ratio + this.xVelocity += xDiff * ratio + this.y = this.y + this.yVelocity + if (this.y < 0) { + this.y = 0 + this.yVelocity = -this.yVelocity * 0.4 + } + this.x = this.x + this.xVelocity + this.direction = this.xVelocity > 0 ? 
'right' : 'left' + + return + } + this.ground = this.findGround() - this.yVelocity -= GRAVITY_PIXELS + this.yVelocity -= this.gravity // We decelerate the x velocity if the hedgehog is stopped - if (['stop'].includes(this.animationName) && !this.isControlledByUser) { + if (!this.isControlledByUser && this.mainAnimation?.name !== 'walk' && this.onGround()) { this.xVelocity = this.xVelocity * 0.6 } @@ -445,6 +640,9 @@ export class HedgehogActor { } private onGround(): boolean { + if (this.static) { + return true + } if (this.ground) { const groundLevel = elementToBox(this.ground).y + elementToBox(this.ground).height return this.y <= groundLevel @@ -457,15 +655,45 @@ export class HedgehogActor { return !this.onGround() && Math.abs(this.yVelocity) > 1 } + renderRope(): JSX.Element | null { + if (!this.lastKnownMousePosition) { + return null + } + + // We position the rope to roughly where the hand should be + const x = this.x + SPRITE_SIZE / 2 + const y = this.y + SPRITE_SIZE / 2 + const mouseX = this.lastKnownMousePosition[0] + // Y coords are inverted + const mouseY = window.innerHeight - this.lastKnownMousePosition[1] + + return ( +
+ ) + } + render({ onClick, ref }: { onClick: () => void; ref: ForwardedRef }): JSX.Element { - const accessoryPosition = this.animation.accessoryPositions?.[this.animationFrame] + const accessoryPosition = this.mainAnimation?.spriteInfo.accessoryPositions?.[this.mainAnimation.frame] const preloadContent = - Object.values(this.animations) - .map((x) => `url(${baseSpritePath()}/${x.img}.png)`) + Object.values(this.animations()) + .map((x) => `url(${spriteUrl(this.hedgehogConfig.skin ?? 'default', x.img)})`) .join(' ') + ' ' + this.accessories() - .map((accessory) => `url(${baseSpriteAccessoriesPath}/${accessory.img}.png)`) + .map((accessory) => `url(${spriteAccessoryUrl(accessory.img)})`) .join(' ') const imageFilter = this.hedgehogConfig.color ? COLOR_TO_FILTER_MAP[this.hedgehogConfig.color] : undefined @@ -549,16 +777,17 @@ export class HedgehogActor { }} className="HedgehogBuddy" data-content={preloadContent} - onTouchStart={() => onTouchOrMouseStart()} - onMouseDown={() => onTouchOrMouseStart()} + onTouchStart={this.static ? undefined : () => onTouchOrMouseStart()} + onMouseDown={this.static ? undefined : () => onTouchOrMouseStart()} onMouseOver={() => (this.showTooltip = true)} onMouseOut={() => (this.showTooltip = false)} + onClick={this.static ? onClick : undefined} // eslint-disable-next-line react/forbid-dom-props style={{ - position: 'fixed', - left: this.x, - bottom: this.y - SHADOW_HEIGHT * 0.5, - transition: !this.isDragging ? `all ${1000 / FPS}ms` : undefined, + position: this.static ? 'relative' : 'fixed', + left: this.static ? undefined : this.x, + bottom: this.static ? undefined : this.y - SHADOW_HEIGHT * 0.5, + transition: !(this.isDragging || this.followMouse) ? `all ${1000 / FPS}ms` : undefined, cursor: 'pointer', margin: 0, }} @@ -585,19 +814,27 @@ export class HedgehogActor { transform: `scaleX(${this.direction === 'right' ? 1 : -1})`, }} > -
+ {this.mainAnimation ? ( +
+ ) : null} + {this.accessories().map((accessory, index) => (
))} + {this.overlayAnimation ? ( +
+ ) : null}
+ {this.renderRope()} + {(window as any)._posthogDebugHedgehog && ( <> {[this.element && elementToBox(this.element), this.ground && elementToBox(this.ground)].map( @@ -651,7 +908,7 @@ export class HedgehogActor { } export const HedgehogBuddy = React.forwardRef(function HedgehogBuddy( - { onActorLoaded, onClick: _onClick, onPositionChange, hedgehogConfig, tooltip }, + { onActorLoaded, onClick: _onClick, onPositionChange, hedgehogConfig, tooltip, static: staticMode }, ref ): JSX.Element { const actorRef = useRef() @@ -682,6 +939,10 @@ export const HedgehogBuddy = React.forwardRef { + actor.static = staticMode ?? false + }, [staticMode]) + useEffect(() => { let timer: any = null @@ -712,7 +973,7 @@ export const HedgehogBuddy = React.forwardRef { - !actor.isDragging && _onClick?.() + !actor.isDragging && _onClick?.(actor) } return actor.render({ onClick, ref }) @@ -734,16 +995,19 @@ export function MyHedgehogBuddy({ const [popoverVisible, setPopoverVisible] = useState(false) - const onClick = (): void => { + const onClick = (actor: HedgehogActor): void => { setPopoverVisible(!popoverVisible) - _onClick?.() + _onClick?.(actor) } const disappear = (): void => { setPopoverVisible(false) - actor?.setAnimation('wave') - setTimeout(() => onClose?.(), (actor!.animations.wave.frames * 1000) / FPS) + actor?.setAnimation('wave', { + onComplete() { + onClose?.(actor) + return true + }, + }) } - return ( setPopoverVisible(false)} @@ -797,7 +1061,11 @@ export function MemberHedgehogBuddy({ member }: { member: OrganizationMemberType const memberHedgehogConfig: HedgehogConfig = useMemo( () => ({ ...hedgehogConfig, + // Reset some params to default + skin: 'default', + // Then apply the user's config ...member.user.hedgehog_config, + // Finally some settings are forced controls_enabled: false, }), [hedgehogConfig, member.user.hedgehog_config] diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx index 337dc6744b1bf..8c766f9bda66e 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx @@ -1,55 +1,22 @@ -import { useEffect, useRef, useState } from 'react' - import { HedgehogConfig } from '~/types' -import { FPS, X_FRAMES } from './HedgehogBuddy' import { COLOR_TO_FILTER_MAP } from './hedgehogBuddyLogic' -import { - baseSpriteAccessoriesPath, - baseSpritePath, - SPRITE_SIZE, - standardAccessories, - standardAnimations, -} from './sprites/sprites' +import { spriteAccessoryUrl, spriteUrl, standardAccessories } from './sprites/sprites' -export type HedgehogBuddyStaticProps = Partial & { size?: number | string; waveOnAppearance?: boolean } +export type HedgehogBuddyStaticProps = Partial & { size?: number | string } // Takes a range of options and renders a static hedgehog export function HedgehogBuddyStatic({ accessories, color, size, - waveOnAppearance, + skin = 'default', }: HedgehogBuddyStaticProps): JSX.Element { const imgSize = size ?? 60 const accessoryInfos = accessories?.map((x) => standardAccessories[x]) const filter = color ? COLOR_TO_FILTER_MAP[color] : null - const [animationIteration, setAnimationIteration] = useState(waveOnAppearance ? 
1 : 0) - const [_, setTimerLoop] = useState(0) - const animationFrameRef = useRef(0) - - useEffect(() => { - if (animationIteration) { - setTimerLoop(0) - let timer: any = null - const loop = (): void => { - if (animationFrameRef.current < standardAnimations.wave.frames) { - animationFrameRef.current++ - timer = setTimeout(loop, 1000 / FPS) - } else { - animationFrameRef.current = 0 - } - setTimerLoop((x) => x + 1) - } - loop() - return () => { - clearTimeout(timer) - } - } - }, [animationIteration]) - return (
setAnimationIteration((x) => x + 1) : undefined} >
{accessoryInfos?.map((accessory, index) => (

Hi, I'm Max!

- Don't mind me. I'm just here to keep you company. -
- You can move me around by clicking and dragging or control me with WASD / arrow keys. + {hedgehogConfig.skin === 'spiderhog' ? ( + <> + Well, it's not every day you meet a hedgehog with spider powers. Yep, that's me - + SpiderHog. I wasn't always this way. Just your average, speedy little guy until a + radioactive spider bit me. With great power comes great responsibility, so buckle up, + because this hedgehog's got a whole data warehouse to protect... +
+ You can move me around by clicking and dragging or control me with WASD / arrow keys and + I'll use your mouse as a web-slinging target. + ) : ( + <> + Don't mind me. I'm just here to keep you company. +
+ You can move me around by clicking and dragging or control me with WASD / arrow keys. + + )}

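The `secretMap` added to `setupKeyboardListeners` in the HedgehogBuddy.tsx hunks above works by keeping a rolling buffer of the last 20 pressed keys and suffix-matching it against each secret sequence ('fff', 'fire', 'spiderhog', and the Konami code). A self-contained sketch of that mechanism, using illustrative names rather than the actor's actual fields:

```ts
type Secret = { keys: string[]; action: () => void }

// Rolling-buffer matcher in the spirit of the diff: feed in each keydown,
// fire an action when the most recent keys spell out a secret sequence.
function makeSecretMatcher(secrets: Secret[], maxBuffer = 20): (key: string) => void {
    const lastKeys: string[] = []
    return (key) => {
        lastKeys.push(key.toLowerCase())
        if (lastKeys.length > maxBuffer) {
            lastKeys.shift() // keep the buffer bounded
        }
        for (const secret of secrets) {
            // Compare the buffer's suffix against the secret, key by key
            if (lastKeys.slice(-secret.keys.length).join('') === secret.keys.join('')) {
                secret.action()
                lastKeys.splice(-secret.keys.length) // consume the matched keys
            }
        }
    }
}

// Hypothetical usage: wire every keydown into the matcher
const onKey = makeSecretMatcher([
    { keys: ['f', 'f', 'f'], action: () => console.log('hedgehog on fire') },
    { keys: ['s', 'p', 'i', 'd', 'e', 'r', 'h', 'o', 'g'], action: () => console.log('skin: spiderhog') },
])
window.addEventListener('keydown', (e) => onKey(e.key))
```

Note that each KeyboardEvent key counts as one buffer entry, so multi-character names like 'arrowup' match as single tokens, which is why the Konami sequence in the diff lists the arrows as whole strings.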
@@ -138,13 +155,33 @@ function HedgehogAccessories(): JSX.Element { function HedgehogColor(): JSX.Element { const { hedgehogConfig } = useValues(hedgehogBuddyLogic) const { patchHedgehogConfig } = useActions(hedgehogBuddyLogic) + const skinSpiderHogEnabled = !!useFeatureFlag('HEDGEHOG_SKIN_SPIDERHOG') + + const skins: HedgehogSkin[] = skinSpiderHogEnabled ? ['default', 'spiderhog'] : ['default'] return ( <> -

Colors

+

Skins and colors

- {[null, ...Object.keys(COLOR_TO_FILTER_MAP)].map((option) => ( + {skins.map((option) => ( + patchHedgehogConfig({ skin: option as any, color: null })} + noPadding + tooltip={<>{capitalizeFirstLetter(option ?? 'default')}} + > + + + ))} + {[...Object.keys(COLOR_TO_FILTER_MAP)].map((option) => ( patchHedgehogConfig({ color: option as any })} + onClick={() => patchHedgehogConfig({ color: option as any, skin: 'default' })} noPadding tooltip={<>{capitalizeFirstLetter(option ?? 'default')}} > diff --git a/frontend/src/lib/components/HedgehogBuddy/sprites/sprites.tsx b/frontend/src/lib/components/HedgehogBuddy/sprites/sprites.tsx index 273b97a59ead5..46c30dcbcd866 100644 --- a/frontend/src/lib/components/HedgehogBuddy/sprites/sprites.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/sprites/sprites.tsx @@ -1,10 +1,13 @@ +import { inStorybook } from 'lib/utils' + import { toolbarConfigLogic } from '~/toolbar/toolbarConfigLogic' +import { HedgehogSkin } from '~/types' export const SPRITE_SIZE = 80 export const SHADOW_HEIGHT = SPRITE_SIZE / 8 export const SPRITE_SHEET_WIDTH = SPRITE_SIZE * 8 -type SpriteInfo = { +export type SpriteInfo = { /** Number of frames in this sprite sheet */ frames: number /** Path to the sprite sheet */ @@ -16,6 +19,8 @@ type SpriteInfo = { /** How likely this animation is to be chosen. Higher numbers are more likely. */ randomChance?: number accessoryPositions?: [number, number][] + filter?: string + style?: React.CSSProperties } export const accessoryGroups = ['headwear', 'eyewear', 'other'] as const @@ -28,11 +33,13 @@ export type AccessoryInfo = { // If loaded via the toolbar the root domain won't be app.posthog.com and so the assets won't load // Simple workaround is we detect if the domain is localhost and if not we just use https://us.posthog.com -export const baseSpritePath = (): string => { +const baseSpritePath = (): string => { let path = `/static/hedgehog/sprites` const toolbarAPIUrl = toolbarConfigLogic.findMounted()?.values.apiURL - if (window.location.hostname !== 'localhost') { + if (inStorybook()) { + // Nothing to do + } else if (window.location.hostname !== 'localhost') { path = `https://us.posthog.com${path}` } else if (toolbarAPIUrl) { path = `${toolbarAPIUrl}${path}` @@ -40,9 +47,24 @@ export const baseSpritePath = (): string => { return path } -export const baseSpriteAccessoriesPath = (): string => `${baseSpritePath()}/accessories` +const baseSpriteAccessoriesPath = (): string => `${baseSpritePath()}/accessories` + +export const spriteUrl = (skin: HedgehogSkin, img: string): string => { + return `${baseSpritePath()}/skins/${skin}/${img}.png` +} -export const standardAnimations: { [key: string]: SpriteInfo } = { +export const spriteOverlayUrl = (img: string): string => { + return `${baseSpritePath()}/overlays/${img}.png` +} + +export const spriteAccessoryUrl = (img: string): string => { + return `${baseSpriteAccessoriesPath()}/${img}.png` +} + +const animationsNames = ['stop', 'fall', 'jump', 'sign', 'walk', 'wave', 'flag', 'inspect', 'phone', 'action'] as const +export type AnimationName = (typeof animationsNames)[number] + +const standardAnimations: Record = { stop: { img: 'wave', frames: 1, @@ -92,12 +114,6 @@ export const standardAnimations: { [key: string]: SpriteInfo } = { maxIteration: 1, randomChance: 2, }, - heatmaps: { - img: 'heatmaps', - frames: 14, - maxIteration: 1, - randomChance: 0, - }, flag: { img: 'flag', frames: 25, @@ -124,6 +140,21 @@ export const standardAnimations: { [key: string]: SpriteInfo } = { }, } +const 
overlayAnimationsNames = ['fire'] as const + +export type OverlayAnimationName = (typeof overlayAnimationsNames)[number] + +export const overlayAnimations: Record = { + fire: { + img: 'fire', + frames: 14, + maxIteration: 1, + style: { + opacity: 0.75, + }, + }, +} + export const standardAccessories: { [key: string]: AccessoryInfo } = { beret: { img: 'beret', @@ -191,3 +222,14 @@ group: 'other', }, } + +export const skins: Record = { + default: standardAnimations, + spiderhog: { + stop: standardAnimations.stop, + fall: standardAnimations.fall, + jump: standardAnimations.jump, + walk: standardAnimations.walk, + wave: standardAnimations.wave, + }, +} diff --git a/frontend/src/lib/components/JSSnippet.tsx b/frontend/src/lib/components/JSSnippet.tsx index 5c0c32bbdb9b4..05cbf0cfb6139 100644 --- a/frontend/src/lib/components/JSSnippet.tsx +++ b/frontend/src/lib/components/JSSnippet.tsx @@ -13,7 +13,7 @@ export function snippetFunctions(): string { if ( typeof posthogPrototype[key] === 'function' && !key.startsWith('_') && - !['constructor', 'toString'].includes(key) + !['constructor', 'toString', 'push'].includes(key) ) { methods.push(key) } diff --git a/frontend/src/lib/components/SmoothingFilter/SmoothingFilter.tsx b/frontend/src/lib/components/SmoothingFilter/SmoothingFilter.tsx index 1149bb6dbe7b0..f2f8994434f69 100644 --- a/frontend/src/lib/components/SmoothingFilter/SmoothingFilter.tsx +++ b/frontend/src/lib/components/SmoothingFilter/SmoothingFilter.tsx @@ -1,5 +1,4 @@ -// eslint-disable-next-line no-restricted-imports -import { FundOutlined } from '@ant-design/icons' +import { IconPulse } from '@posthog/icons' import { LemonSelect } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { insightLogic } from 'scenes/insights/insightLogic' @@ -25,7 +24,8 @@ export function SmoothingFilter(): JSX.Element | null { label: value === smoothingIntervals ? ( <> - {label} + + {label} ) : ( label diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index d947dc10e6723..0a9ae9337de96 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -103,6 +103,11 @@ export const TARGET_AREA_TO_NAME = [ 'data-attr': `support-form-target-area-onboarding`, label: 'Onboarding', }, + { + value: 'sdk', + 'data-attr': `support-form-target-area-sdk`, + label: 'SDK / Implementation', + }, { value: 'cohorts', 'data-attr': `support-form-target-area-cohorts`, diff --git a/frontend/src/lib/components/TZLabel/index.tsx b/frontend/src/lib/components/TZLabel/index.tsx index 8da75e0ccd3c2..0f237f21db989 100644 --- a/frontend/src/lib/components/TZLabel/index.tsx +++ b/frontend/src/lib/components/TZLabel/index.tsx @@ -1,8 +1,6 @@ import './index.scss' -// eslint-disable-next-line no-restricted-imports -import { LaptopOutlined, ProjectOutlined } from '@ant-design/icons' -import { IconGear } from '@posthog/icons' +import { IconGear, IconHome, IconLaptop } from '@posthog/icons' import { LemonButton, LemonDivider, LemonDropdown, LemonDropdownProps } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' @@ -57,7 +55,7 @@ const TZLabelPopoverContent = React.memo(function TZLabelPopoverContent({
- +
Your device
{shortTimeZone(undefined, time.toDate())}
@@ -66,7 +64,7 @@ const TZLabelPopoverContent = React.memo(function TZLabelPopoverContent({ {currentTeam && (
- +
Project
{shortTimeZone(currentTeam.timezone, time.toDate())}
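The VersionChecker hunks below convert a singleton kea logic into a keyed one: `props` carry a `teamId`, `key` derives the instance key from it (falling back to 'no-team-id'), and each caller selects its instance by passing the team ID. A minimal consumer sketch mirroring `VersionCheckerBanner`; the component name and render output here are placeholders:

```tsx
import { useValues } from 'kea'
import { teamLogic } from 'scenes/teamLogic'

import { versionCheckerLogic } from './versionCheckerLogic'

// Each distinct teamId mounts its own logic instance, so version warnings
// computed for one team never leak into another team's UI.
export function SdkVersionWarning(): JSX.Element | null {
    const { currentTeamId } = useValues(teamLogic)
    const { versionWarning } = useValues(versionCheckerLogic({ teamId: currentTeamId }))
    if (!versionWarning) {
        return null
    }
    // Placeholder output: the real banner renders a LemonBanner
    return <div>Your posthog-js version may be out of date</div>
}
```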
diff --git a/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx b/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx index fe904748da643..8e7e72efe1b40 100644 --- a/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx +++ b/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx @@ -1,10 +1,12 @@ import { useValues } from 'kea' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { teamLogic } from 'scenes/teamLogic' import { versionCheckerLogic } from './versionCheckerLogic' export function VersionCheckerBanner(): JSX.Element | null { - const { versionWarning } = useValues(versionCheckerLogic) + const { currentTeamId } = useValues(teamLogic) + const { versionWarning } = useValues(versionCheckerLogic({ teamId: currentTeamId })) if (!versionWarning) { return null } diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts index 3a86eedbd95a9..7ffecbbf89c82 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts @@ -1,4 +1,4 @@ -import { actions, afterMount, kea, listeners, path, reducers, sharedListeners } from 'kea' +import { actions, afterMount, kea, key, listeners, path, props, reducers, sharedListeners } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' import { isNotNil } from 'lib/utils' @@ -45,8 +45,14 @@ export interface AvailableVersions { deprecation?: PosthogJSDeprecation } +export interface VersionCheckerLogicProps { + teamId: number | null +} + export const versionCheckerLogic = kea([ - path(['components', 'VersionChecker', 'versionCheckerLogic']), + props({ teamId: null } as VersionCheckerLogicProps), + key(({ teamId }) => teamId || 'no-team-id'), + path((key) => ['components', 'VersionChecker', 'versionCheckerLogic', key]), actions({ setVersionWarning: (versionWarning: SDKVersionWarning | null) => ({ versionWarning }), setSdkVersions: (sdkVersions: SDKVersion[]) => ({ sdkVersions }), diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 2a8b466400bdb..f5e94a2e13af6 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -205,13 +205,16 @@ export const FEATURE_FLAGS = { MULTIPLE_BREAKDOWNS: 'multiple-breakdowns', // owner: @skoob13 #team-product-analytics WEB_ANALYTICS_LIVE_USER_COUNT: 'web-analytics-live-user-count', // owner: @robbie-c SETTINGS_SESSION_TABLE_VERSION: 'settings-session-table-version', // owner: @robbie-c + INSIGHT_FUNNELS_USE_UDF: 'insight-funnels-use-udf', // owner: @aspicer #team-product-analytics FIRST_TIME_FOR_USER_MATH: 'first-time-for-user-math', // owner: @skoob13 #team-product-analytics MULTITAB_EDITOR: 'multitab-editor', // owner: @EDsCODE #team-data-warehouse WEB_ANALYTICS_REPLAY: 'web-analytics-replay', // owner: @robbie-c BATCH_EXPORTS_POSTHOG_HTTP: 'posthog-http-batch-exports', EXPERIMENT_MAKE_DECISION: 'experiment-make-decision', // owner: @jurajmajerik #team-feature-success + DATA_MODELING: 'data-modeling', // owner: @EDsCODE #team-data-warehouse WEB_ANALYTICS_CONVERSION_GOALS: 'web-analytics-conversion-goals', // owner: @robbie-c WEB_ANALYTICS_LAST_CLICK: 'web-analytics-last-click', // owner: @robbie-c + HEDGEHOG_SKIN_SPIDERHOG: 'hedgehog-skin-spiderhog', // owner: @benjackwhite } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git 
a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx index 32b569226dcaa..ed3bd39277311 100644 --- a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx +++ b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.tsx @@ -96,6 +96,7 @@ interface LemonCollapsePanelProps { onChange: (isExpanded: boolean) => void className?: string dataAttr?: string + onHeaderClick?: () => void } function LemonCollapsePanel({ @@ -106,13 +107,17 @@ function LemonCollapsePanel({ className, dataAttr, onChange, + onHeaderClick, }: LemonCollapsePanelProps): JSX.Element { const { height: contentHeight, ref: contentRef } = useResizeObserver({ box: 'border-box' }) return (
onChange(!isExpanded)} + onClick={() => { + onHeaderClick && onHeaderClick() + onChange(!isExpanded) + }} icon={isExpanded ? : } className="LemonCollapsePanel__header" {...(dataAttr ? { 'data-attr': dataAttr } : {})} diff --git a/frontend/src/lib/utils.tsx b/frontend/src/lib/utils.tsx index b95beb0d7ecfb..984708383ed16 100644 --- a/frontend/src/lib/utils.tsx +++ b/frontend/src/lib/utils.tsx @@ -1676,6 +1676,10 @@ export function inStorybookTestRunner(): boolean { return navigator.userAgent.includes('StorybookTestRunner') } +export function inStorybook(): boolean { + return '__STORYBOOK_CLIENT_API__' in window +} + /** We issue a cancel request, when the request is aborted or times out (frontend side), since in these cases the backend query might still be running. */ export function shouldCancelQuery(error: any): boolean { return isAbortedRequest(error) || isTimedOutRequest(error) diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index 599a1778ba3c2..1b25547e58b1c 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -125,7 +125,11 @@ export const dataNodeLogic = kea([ } }), actions({ - loadData: (refresh = false, queryId?: string) => ({ refresh, queryId: queryId || uuid() }), + loadData: (refresh = false, alreadyRunningQueryId?: string) => ({ + refresh, + queryId: alreadyRunningQueryId || uuid(), + pollOnly: !!alreadyRunningQueryId, + }), abortAnyRunningQuery: true, abortQuery: (payload: { queryId: string }) => payload, cancelQuery: true, @@ -145,7 +149,7 @@ export const dataNodeLogic = kea([ { setResponse: (response) => response, clearResponse: () => null, - loadData: async ({ refresh: refreshArg, queryId }, breakpoint) => { + loadData: async ({ refresh: refreshArg, queryId, pollOnly }, breakpoint) => { const refresh = props.alwaysRefresh || refreshArg if (props.doNotLoad) { return props.cachedResults @@ -204,7 +208,8 @@ export const dataNodeLogic = kea([ refresh, queryId, actions.setPollResponse, - props.filtersOverride + props.filtersOverride, + pollOnly )) ?? 
null const duration = performance.now() - now return { data, duration } diff --git a/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts b/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts index b822f0c884c5f..e2fc4bde77224 100644 --- a/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts +++ b/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts @@ -66,7 +66,8 @@ describe('dataTableLogic', () => { false, expect.any(String), expect.any(Function), - undefined + undefined, + false ) expect(performQuery).toHaveBeenCalledTimes(1) }) diff --git a/frontend/src/queries/nodes/DataTable/queryFeatures.ts b/frontend/src/queries/nodes/DataTable/queryFeatures.ts index 066774effad9a..ae0351ace46b3 100644 --- a/frontend/src/queries/nodes/DataTable/queryFeatures.ts +++ b/frontend/src/queries/nodes/DataTable/queryFeatures.ts @@ -5,6 +5,7 @@ import { isHogQLQuery, isPersonsNode, isSessionAttributionExplorerQuery, + isWebExternalClicksQuery, isWebGoalsQuery, isWebOverviewQuery, isWebStatsTableQuery, @@ -62,6 +63,7 @@ export function getQueryFeatures(query: Node): Set { if ( isWebOverviewQuery(query) || isWebTopClicksQuery(query) || + isWebExternalClicksQuery(query) || isWebStatsTableQuery(query) || isWebGoalsQuery(query) ) { diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx index 9231c69380628..c50d7bbdf153c 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx @@ -289,7 +289,7 @@ export function EditorFilters({ query, showing, embedded }: EditorFiltersProps):
{shouldShowSessionAnalysisWarning ? ( - + When using sessions and session properties, events without session IDs will be excluded from the set of results.{' '} Learn more about sessions. diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index ec1ca8063e8dc..fb71d16fd963f 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -7,7 +7,14 @@ import posthog from 'posthog-js' import { OnlineExportContext, QueryExportContext } from '~/types' import { DashboardFilter, DataNode, HogQLQuery, HogQLQueryResponse, NodeKind, PersonsNode, QueryStatus } from './schema' -import { isDataTableNode, isDataVisualizationNode, isHogQLQuery, isInsightVizNode, isPersonsNode } from './utils' +import { + isAsyncResponse, + isDataTableNode, + isDataVisualizationNode, + isHogQLQuery, + isInsightVizNode, + isPersonsNode, +} from './utils' const QUERY_ASYNC_MAX_INTERVAL_SECONDS = 3 const QUERY_ASYNC_TOTAL_POLL_SECONDS = 10 * 60 + 6 // keep in sync with backend-side timeout (currently 10min) + a small buffer @@ -29,7 +36,6 @@ export function queryExportContext( const SYNC_ONLY_QUERY_KINDS = [ 'HogQuery', 'HogQLMetadata', - 'EventsQuery', 'HogQLAutocomplete', 'DatabaseSchemaQuery', 'ErrorTrackingQuery', @@ -39,7 +45,7 @@ export async function pollForResults( queryId: string, showProgress: boolean, methodOptions?: ApiMethodOptions, - callback?: (response: QueryStatus) => void + onPoll?: (response: QueryStatus) => void ): Promise { const pollStart = performance.now() let currentDelay = 300 // start low, because all queries will take at minimum this @@ -50,12 +56,11 @@ export async function pollForResults( try { const statusResponse = (await api.queryStatus.get(queryId, showProgress)).query_status - if (statusResponse.complete) { return statusResponse } - if (callback) { - callback(statusResponse) + if (onPoll) { + onPoll(statusResponse) } } catch (e: any) { e.detail = e.data?.query_status?.error_message @@ -74,27 +79,42 @@ async function executeQuery( refresh?: boolean, queryId?: string, setPollResponse?: (response: QueryStatus) => void, - filtersOverride?: DashboardFilter | null + filtersOverride?: DashboardFilter | null, + /** + * Whether to limit the function to just polling the provided query ID. + * This is important in shared contexts, where we cannot create arbitrary queries via POST – we can only GET. 
+ */ + pollOnly = false ): Promise> { const isAsyncQuery = methodOptions?.async !== false && !SYNC_ONLY_QUERY_KINDS.includes(queryNode.kind) && !!featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.QUERY_ASYNC] - const showProgress = !!featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.INSIGHT_LOADING_BAR] - const response = await api.query(queryNode, methodOptions, queryId, refresh, isAsyncQuery, filtersOverride) + if (!pollOnly) { + const response = await api.query(queryNode, methodOptions, queryId, refresh, isAsyncQuery, filtersOverride) - if (!response.query_status?.query_async) { - // Executed query synchronously or from cache - return response - } - if (response.query_status?.complete) { - // Async query returned immediately - return response.results - } + if (!isAsyncResponse(response)) { + // Executed query synchronously or from cache + return response + } - const statusResponse = await pollForResults(response.query_status.id, showProgress, methodOptions, setPollResponse) + if (response.query_status.complete) { + // Async query returned immediately + return response.results + } + + queryId = response.query_status.id + } else { + if (!isAsyncQuery) { + throw new Error('pollOnly is only supported for async queries') + } + if (!queryId) { + throw new Error('pollOnly requires a queryId') + } + } + const statusResponse = await pollForResults(queryId, showProgress, methodOptions, setPollResponse) return statusResponse.results } @@ -105,7 +125,8 @@ export async function performQuery( refresh?: boolean, queryId?: string, setPollResponse?: (status: QueryStatus) => void, - filtersOverride?: DashboardFilter | null + filtersOverride?: DashboardFilter | null, + pollOnly = false ): Promise> { let response: NonNullable const logParams: Record = {} @@ -115,7 +136,15 @@ export async function performQuery( if (isPersonsNode(queryNode)) { response = await api.get(getPersonsEndpoint(queryNode), methodOptions) } else { - response = await executeQuery(queryNode, methodOptions, refresh, queryId, setPollResponse, filtersOverride) + response = await executeQuery( + queryNode, + methodOptions, + refresh, + queryId, + setPollResponse, + filtersOverride, + pollOnly + ) if (isHogQLQuery(queryNode) && response && typeof response === 'object') { logParams.clickhouse_sql = (response as HogQLQueryResponse)?.clickhouse } diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 00f4bb1801dc3..969d0b01f7fce 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -494,6 +494,9 @@ { "$ref": "#/definitions/WebStatsTableQuery" }, + { + "$ref": "#/definitions/WebExternalClicksTableQuery" + }, { "$ref": "#/definitions/WebTopClicksQuery" }, @@ -505,6 +508,9 @@ }, { "$ref": "#/definitions/ErrorTrackingQuery" + }, + { + "$ref": "#/definitions/ExperimentResultQuery" } ] }, @@ -1235,6 +1241,9 @@ "description": "Generated HogQL query.", "type": "string" }, + "isUdf": { + "type": "boolean" + }, "is_cached": { "type": "boolean" }, @@ -2029,6 +2038,92 @@ ], "type": "object" }, + "CachedWebExternalClicksTableQueryResponse": { + "additionalProperties": false, + "properties": { + "cache_key": { + "type": "string" + }, + "cache_target_age": { + "format": "date-time", + "type": "string" + }, + "calculation_trigger": { + "description": "What triggered the calculation of the query, leave empty if user/immediate", + "type": "string" + }, + "columns": { + "items": {}, + "type": "array" + }, + "error": { + "description": "Query error. 
Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + "type": "string" + }, + "hasMore": { + "type": "boolean" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "is_cached": { + "type": "boolean" + }, + "last_refresh": { + "format": "date-time", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "next_allowed_client_refresh": { + "format": "date-time", + "type": "string" + }, + "offset": { + "type": "integer" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." + }, + "results": { + "items": {}, + "type": "array" + }, + "samplingRate": { + "$ref": "#/definitions/SamplingRate" + }, + "timezone": { + "type": "string" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + }, + "types": { + "items": {}, + "type": "array" + } + }, + "required": [ + "cache_key", + "is_cached", + "last_refresh", + "next_allowed_client_refresh", + "results", + "timezone" + ], + "type": "object" + }, "CachedWebGoalsQueryResponse": { "additionalProperties": false, "properties": { @@ -2892,6 +2987,60 @@ "required": ["results"], "type": "object" }, + { + "additionalProperties": false, + "properties": { + "columns": { + "items": {}, + "type": "array" + }, + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + "type": "string" + }, + "hasMore": { + "type": "boolean" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "offset": { + "type": "integer" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." 
+ }, + "results": { + "items": {}, + "type": "array" + }, + "samplingRate": { + "$ref": "#/definitions/SamplingRate" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + }, + "types": { + "items": {}, + "type": "array" + } + }, + "required": ["results"], + "type": "object" + }, { "additionalProperties": false, "properties": { @@ -3089,6 +3238,40 @@ }, "required": ["results"], "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "insight": { + "const": "TRENDS", + "type": "string" + }, + "results": { + "additionalProperties": { + "$ref": "#/definitions/ExperimentVariantTrendResult" + }, + "type": "object" + } + }, + "required": ["insight", "results"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "insight": { + "const": "FUNNELS", + "type": "string" + }, + "results": { + "additionalProperties": { + "$ref": "#/definitions/ExperimentVariantFunnelResult" + }, + "type": "object" + } + }, + "required": ["insight", "results"], + "type": "object" } ] }, @@ -3189,6 +3372,9 @@ { "$ref": "#/definitions/WebStatsTableQuery" }, + { + "$ref": "#/definitions/WebExternalClicksTableQuery" + }, { "$ref": "#/definitions/WebTopClicksQuery" }, @@ -3200,6 +3386,9 @@ }, { "$ref": "#/definitions/ErrorTrackingQuery" + }, + { + "$ref": "#/definitions/ExperimentResultQuery" } ], "description": "Source of the events" @@ -3441,6 +3630,9 @@ "hogql_value": { "type": "string" }, + "id": { + "type": "string" + }, "name": { "type": "string" }, @@ -4309,6 +4501,107 @@ "required": ["columns", "hogql", "results", "types"], "type": "object" }, + "ExperimentResultFunnelQueryResponse": { + "additionalProperties": false, + "properties": { + "insight": { + "const": "FUNNELS", + "type": "string" + }, + "results": { + "additionalProperties": { + "$ref": "#/definitions/ExperimentVariantFunnelResult" + }, + "type": "object" + } + }, + "required": ["insight", "results"], + "type": "object" + }, + "ExperimentResultQuery": { + "additionalProperties": false, + "properties": { + "kind": { + "const": "ExperimentResultQuery", + "type": "string" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "response": { + "$ref": "#/definitions/ExperimentResultQueryResponse" + }, + "source": { + "anyOf": [ + { + "$ref": "#/definitions/TrendsQuery" + }, + { + "$ref": "#/definitions/FunnelsQuery" + } + ] + }, + "variants": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "required": ["kind", "source", "variants"], + "type": "object" + }, + "ExperimentResultQueryResponse": { + "anyOf": [ + { + "$ref": "#/definitions/ExperimentResultTrendQueryResponse" + }, + { + "$ref": "#/definitions/ExperimentResultFunnelQueryResponse" + } + ] + }, + "ExperimentResultTrendQueryResponse": { + "additionalProperties": false, + "properties": { + "insight": { + "const": "TRENDS", + "type": "string" + }, + "results": { + "additionalProperties": { + "$ref": "#/definitions/ExperimentVariantTrendResult" + }, + "type": "object" + } + }, + "required": ["insight", "results"], + "type": "object" + }, + "ExperimentVariantFunnelResult": { + "additionalProperties": false, + "properties": { + "failure_count": { + "type": "number" + }, + "success_count": { + "type": "number" + } + }, + "required": ["success_count", "failure_count"], + "type": "object" + }, + "ExperimentVariantTrendResult": { + 
"additionalProperties": false, + "properties": { + "count": { + "type": "number" + } + }, + "required": ["count"], + "type": "object" + }, "ExperimentalAITrendsQuery": { "additionalProperties": false, "properties": { @@ -4938,6 +5231,9 @@ "layout": { "$ref": "#/definitions/FunnelLayout", "default": "vertical" + }, + "useUdf": { + "type": "boolean" } }, "type": "object" @@ -5084,6 +5380,9 @@ "description": "Generated HogQL query.", "type": "string" }, + "isUdf": { + "type": "boolean" + }, "modifiers": { "$ref": "#/definitions/HogQLQueryModifiers", "description": "Modifiers used when performing the query" @@ -6555,6 +6854,7 @@ "RecordingsQuery", "SessionAttributionExplorerQuery", "ErrorTrackingQuery", + "ExperimentResultQuery", "DataTableNode", "DataVisualizationNode", "SavedInsightNode", @@ -6571,6 +6871,7 @@ "WebOverviewQuery", "WebTopClicksQuery", "WebStatsTableQuery", + "WebExternalClicksTableQuery", "WebGoalsQuery", "DatabaseSchemaQuery" ], @@ -7570,6 +7871,60 @@ "required": ["results"], "type": "object" }, + { + "additionalProperties": false, + "properties": { + "columns": { + "items": {}, + "type": "array" + }, + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + "type": "string" + }, + "hasMore": { + "type": "boolean" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "offset": { + "type": "integer" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." + }, + "results": { + "items": {}, + "type": "array" + }, + "samplingRate": { + "$ref": "#/definitions/SamplingRate" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + }, + "types": { + "items": {}, + "type": "array" + } + }, + "required": ["results"], + "type": "object" + }, { "additionalProperties": false, "properties": { @@ -7768,6 +8123,12 @@ "required": ["results"], "type": "object" }, + { + "$ref": "#/definitions/ExperimentResultTrendQueryResponse" + }, + { + "$ref": "#/definitions/ExperimentResultFunnelQueryResponse" + }, { "properties": {}, "type": "object" @@ -8061,6 +8422,60 @@ "required": ["results"], "type": "object" }, + { + "additionalProperties": false, + "properties": { + "columns": { + "items": {}, + "type": "array" + }, + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + "type": "string" + }, + "hasMore": { + "type": "boolean" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "offset": { + "type": "integer" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." 
+ }, + "results": { + "items": {}, + "type": "array" + }, + "samplingRate": { + "$ref": "#/definitions/SamplingRate" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + }, + "types": { + "items": {}, + "type": "array" + } + }, + "required": ["results"], + "type": "object" + }, { "additionalProperties": false, "properties": { @@ -8259,6 +8674,40 @@ "required": ["results"], "type": "object" }, + { + "additionalProperties": false, + "properties": { + "insight": { + "const": "TRENDS", + "type": "string" + }, + "results": { + "additionalProperties": { + "$ref": "#/definitions/ExperimentVariantTrendResult" + }, + "type": "object" + } + }, + "required": ["insight", "results"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "insight": { + "const": "FUNNELS", + "type": "string" + }, + "results": { + "additionalProperties": { + "$ref": "#/definitions/ExperimentVariantFunnelResult" + }, + "type": "object" + } + }, + "required": ["insight", "results"], + "type": "object" + }, { "additionalProperties": false, "properties": { @@ -8310,6 +8759,9 @@ "description": "Generated HogQL query.", "type": "string" }, + "isUdf": { + "type": "boolean" + }, "modifiers": { "$ref": "#/definitions/HogQLQueryModifiers", "description": "Modifiers used when performing the query" @@ -8604,6 +9056,9 @@ { "$ref": "#/definitions/WebStatsTableQuery" }, + { + "$ref": "#/definitions/WebExternalClicksTableQuery" + }, { "$ref": "#/definitions/WebTopClicksQuery" }, @@ -8616,6 +9071,9 @@ { "$ref": "#/definitions/ErrorTrackingQuery" }, + { + "$ref": "#/definitions/ExperimentResultQuery" + }, { "$ref": "#/definitions/DataVisualizationNode" }, @@ -10222,6 +10680,109 @@ }, "type": "array" }, + "WebExternalClicksTableQuery": { + "additionalProperties": false, + "properties": { + "dateRange": { + "$ref": "#/definitions/DateRange" + }, + "filterTestAccounts": { + "type": "boolean" + }, + "kind": { + "const": "WebExternalClicksTableQuery", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "properties": { + "$ref": "#/definitions/WebAnalyticsPropertyFilters" + }, + "response": { + "$ref": "#/definitions/WebExternalClicksTableQueryResponse" + }, + "sampling": { + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean" + }, + "forceSamplingRate": { + "$ref": "#/definitions/SamplingRate" + } + }, + "type": "object" + }, + "stripQueryParams": { + "type": "boolean" + }, + "useSessionsTable": { + "deprecated": "ignored, always treated as enabled *", + "type": "boolean" + } + }, + "required": ["kind", "properties"], + "type": "object" + }, + "WebExternalClicksTableQueryResponse": { + "additionalProperties": false, + "properties": { + "columns": { + "items": {}, + "type": "array" + }, + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", + "type": "string" + }, + "hasMore": { + "type": "boolean" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "offset": { + "type": "integer" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." + }, + "results": { + "items": {}, + "type": "array" + }, + "samplingRate": { + "$ref": "#/definitions/SamplingRate" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + }, + "types": { + "items": {}, + "type": "array" + } + }, + "required": ["results"], + "type": "object" + }, "WebGoalsQuery": { "additionalProperties": false, "properties": { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 351334fb53761..463738bb6de6d 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -74,6 +74,7 @@ export enum NodeKind { RecordingsQuery = 'RecordingsQuery', SessionAttributionExplorerQuery = 'SessionAttributionExplorerQuery', ErrorTrackingQuery = 'ErrorTrackingQuery', + ExperimentResultQuery = 'ExperimentResultQuery', // Interface nodes DataTableNode = 'DataTableNode', @@ -95,6 +96,7 @@ export enum NodeKind { WebOverviewQuery = 'WebOverviewQuery', WebTopClicksQuery = 'WebTopClicksQuery', WebStatsTableQuery = 'WebStatsTableQuery', + WebExternalClicksTableQuery = 'WebExternalClicksTableQuery', WebGoalsQuery = 'WebGoalsQuery', // Database metadata @@ -116,10 +118,12 @@ export type AnyDataNode = | HogQLAutocomplete | WebOverviewQuery | WebStatsTableQuery + | WebExternalClicksTableQuery | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery + | ExperimentResultQuery /** * @discriminator kind @@ -141,10 +145,12 @@ export type QuerySchema = | HogQLAutocomplete | WebOverviewQuery | WebStatsTableQuery + | WebExternalClicksTableQuery | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery + | ExperimentResultQuery // Interface nodes | DataVisualizationNode @@ -560,10 +566,12 @@ export interface DataTableNode | HogQLQuery | WebOverviewQuery | WebStatsTableQuery + | WebExternalClicksTableQuery | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery + | ExperimentResultQuery )['response'] > >, @@ -578,10 +586,12 @@ export interface DataTableNode | HogQLQuery | WebOverviewQuery | WebStatsTableQuery + | WebExternalClicksTableQuery | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery + | ExperimentResultQuery /** Columns shown in the table, unless the `source` provides them. 
*/ columns?: HogQLExpression[] /** Columns that aren't shown in the table, even if in columns or returned data */ @@ -947,6 +957,7 @@ export type FunnelsFilter = { hiddenLegendBreakdowns?: string[] /** @default total */ funnelStepReference?: FunnelsFilterLegacy['funnel_step_reference'] + useUdf?: boolean } export interface FunnelsQuery extends InsightsQueryBase { @@ -973,7 +984,9 @@ export type FunnelTrendsResults = Record[] export interface FunnelsQueryResponse extends AnalyticsQueryResponseBase< FunnelStepsResults | FunnelStepsBreakdownResults | FunnelTimeToConvertResults | FunnelTrendsResults - > {} + > { + isUdf?: boolean +} export type CachedFunnelsQueryResponse = CachedQueryResponse @@ -1408,6 +1421,22 @@ export interface WebStatsTableQueryResponse extends AnalyticsQueryResponseBase +export interface WebExternalClicksTableQuery extends WebAnalyticsQueryBase { + kind: NodeKind.WebExternalClicksTableQuery + limit?: integer + stripQueryParams?: boolean +} +export interface WebExternalClicksTableQueryResponse extends AnalyticsQueryResponseBase { + types?: unknown[] + columns?: unknown[] + hogql?: string + samplingRate?: SamplingRate + hasMore?: boolean + limit?: integer + offset?: integer +} +export type CachedWebExternalClicksTableQueryResponse = CachedQueryResponse + export interface WebGoalsQuery extends WebAnalyticsQueryBase { kind: NodeKind.WebGoalsQuery limit?: integer @@ -1499,6 +1528,33 @@ export type InsightQueryNode = | StickinessQuery | LifecycleQuery +export interface ExperimentVariantTrendResult { + count: number +} + +export interface ExperimentVariantFunnelResult { + success_count: number + failure_count: number +} + +export interface ExperimentResultTrendQueryResponse { + insight: InsightType.TRENDS + results: Record +} + +export interface ExperimentResultFunnelQueryResponse { + insight: InsightType.FUNNELS + results: Record +} + +export type ExperimentResultQueryResponse = ExperimentResultTrendQueryResponse | ExperimentResultFunnelQueryResponse + +export interface ExperimentResultQuery extends DataNode { + kind: NodeKind.ExperimentResultQuery + source: TrendsQuery | FunnelsQuery + variants: string[] +} + /** * @discriminator kind */ @@ -1691,6 +1747,7 @@ export interface DatabaseSchemaField { table?: string fields?: string[] chain?: (string | integer)[] + id?: string } export interface DatabaseSchemaTableCommon { diff --git a/frontend/src/queries/utils.ts b/frontend/src/queries/utils.ts index db8c0d505ae93..ed9cfc8d2fcf1 100644 --- a/frontend/src/queries/utils.ts +++ b/frontend/src/queries/utils.ts @@ -30,6 +30,8 @@ import { NodeKind, PathsQuery, PersonsNode, + QuerySchema, + QueryStatusResponse, RetentionQuery, SavedInsightNode, SessionAttributionExplorerQuery, @@ -78,6 +80,7 @@ export function isDataWarehouseNode(node?: Record | null): node is return node?.kind === NodeKind.DataWarehouseNode } +/** @deprecated `ActorsQuery` is now used instead of `PersonsNode`. 
*/ export function isPersonsNode(node?: Record | null): node is PersonsNode { return node?.kind === NodeKind.PersonsNode } @@ -126,6 +129,10 @@ export function isWebStatsTableQuery(node?: Record | null): node is return node?.kind === NodeKind.WebStatsTableQuery } +export function isWebExternalClicksQuery(node?: Record | null): boolean { + return node?.kind === NodeKind.WebExternalClicksTableQuery +} + export function isWebTopClicksQuery(node?: Record | null): node is WebTopClicksQuery { return node?.kind === NodeKind.WebTopClicksQuery } @@ -200,6 +207,10 @@ export function isQueryForGroup(query: PersonsNode | ActorsQuery): boolean { ) } +export function isAsyncResponse(response: NonNullable): response is QueryStatusResponse { + return 'query_status' in response && response.query_status +} + export function isInsightQueryWithSeries( node?: Node ): node is TrendsQuery | FunnelsQuery | StickinessQuery | LifecycleQuery { diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index 0824fc5438068..4ed1a46a71d7c 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -38,6 +38,7 @@ export const appScenes: Record any> = { [Scene.Surveys]: () => import('./surveys/Surveys'), [Scene.Survey]: () => import('./surveys/Survey'), [Scene.SurveyTemplates]: () => import('./surveys/SurveyTemplates'), + [Scene.DataModel]: () => import('./data-model/DataModelScene'), [Scene.DataWarehouse]: () => import('./data-warehouse/external/DataWarehouseExternalScene'), [Scene.DataWarehouseTable]: () => import('./data-warehouse/new/NewSourceWizard'), [Scene.DataWarehouseExternal]: () => import('./data-warehouse/external/DataWarehouseExternalScene'), diff --git a/frontend/src/scenes/billing/billingLogic.tsx b/frontend/src/scenes/billing/billingLogic.tsx index 3c3f5505eeb34..971ade1b7d30c 100644 --- a/frontend/src/scenes/billing/billingLogic.tsx +++ b/frontend/src/scenes/billing/billingLogic.tsx @@ -479,8 +479,8 @@ export const billingLogic = kea([ ${productOverLimit.subscribed ? 'increase your billing limit' : 'upgrade your plan'} or ${ productOverLimit.name === 'Data warehouse' - ? 'data will not be synced.' - : 'data loss may occur.' + ? 'data will not be synced' + : 'data loss may occur' }.`, dismissKey: 'usage-limit-exceeded', }) diff --git a/frontend/src/scenes/dashboard/DashboardEditBar.tsx b/frontend/src/scenes/dashboard/DashboardEditBar.tsx index 63744a967d8e2..bd850443ee9c8 100644 --- a/frontend/src/scenes/dashboard/DashboardEditBar.tsx +++ b/frontend/src/scenes/dashboard/DashboardEditBar.tsx @@ -10,12 +10,10 @@ import { groupsModel } from '~/models/groupsModel' import { DashboardMode } from '~/types' export function DashboardEditBar(): JSX.Element { - const { dashboard, canEditDashboard, temporaryFilters, dashboardMode } = useValues(dashboardLogic) + const { dashboard, temporaryFilters, dashboardMode } = useValues(dashboardLogic) const { setDates, setProperties, setDashboardMode } = useActions(dashboardLogic) const { groupsTaxonomicTypes } = useValues(groupsModel) - const disabledReason = !canEditDashboard ? "You don't have permission to edit this dashboard" : undefined - return (
( <> @@ -44,7 +41,6 @@ export function DashboardEditBar(): JSX.Element { )} /> { if (dashboardMode !== DashboardMode.Edit) { setDashboardMode(DashboardMode.Edit, null) diff --git a/frontend/src/scenes/dashboard/DashboardHeader.tsx b/frontend/src/scenes/dashboard/DashboardHeader.tsx index 95bbb0c480e6a..403827fde6668 100644 --- a/frontend/src/scenes/dashboard/DashboardHeader.tsx +++ b/frontend/src/scenes/dashboard/DashboardHeader.tsx @@ -138,7 +138,13 @@ export function DashboardHeader(): JSX.Element | null { setDashboardMode(null, DashboardEventSource.DashboardHeaderSaveDashboard) } tabIndex={10} - disabled={dashboardLoading} + disabledReason={ + dashboardLoading + ? 'Wait for dashboard to finish loading' + : canEditDashboard + ? undefined + : 'Not privileged to edit this dashboard' + } > Save diff --git a/frontend/src/scenes/dashboard/DashboardItems.scss b/frontend/src/scenes/dashboard/DashboardItems.scss index 7107657348f3e..79c3eb0470b0f 100644 --- a/frontend/src/scenes/dashboard/DashboardItems.scss +++ b/frontend/src/scenes/dashboard/DashboardItems.scss @@ -23,8 +23,9 @@ will-change: width, height; } -.react-grid-item.react-draggable .CardMeta { - // .CardMeta is the draggable handle +.react-grid-item.react-draggable .CardMeta, +.react-grid-item.react-draggable .TextCard__body { + // .CardMeta is the draggable handle of insight cards. Text cards are draggable as a whole cursor: move; user-select: none; // Prevent accidental text selection while dragging } diff --git a/frontend/src/scenes/dashboard/DashboardItems.tsx b/frontend/src/scenes/dashboard/DashboardItems.tsx index 2c62f47a6d90c..55f4a1cf0438c 100644 --- a/frontend/src/scenes/dashboard/DashboardItems.tsx +++ b/frontend/src/scenes/dashboard/DashboardItems.tsx @@ -60,7 +60,7 @@ export function DashboardItems(): JSX.Element { ({ index }), + incrementActiveVariableIndex: true, + possiblyIncrementActiveVariableIndex: true, + resetVariable: (variableId: string) => ({ variableId }), + goToNextUntouchedActiveVariableIndex: true, }), reducers({ variables: [ @@ -41,14 +46,64 @@ export const dashboardTemplateVariablesLogic = kea { if (v.name === variableName && filterGroup?.events?.length && filterGroup.events[0]) { - return { ...v, default: filterGroup.events[0] } + return { ...v, default: filterGroup.events[0], touched: true } } - return v + return { ...v } + }) + }, + resetVariable: (state, { variableId }) => { + return state.map((v: DashboardTemplateVariableType) => { + if (v.id === variableId) { + return { ...v, default: FALLBACK_EVENT, touched: false } + } + return { ...v } }) }, }, ], + activeVariableIndex: [ + 0, + { + setActiveVariableIndex: (_, { index }) => index, + incrementActiveVariableIndex: (state) => state + 1, + }, + ], }), + selectors(() => ({ + activeVariable: [ + (s) => [s.variables, s.activeVariableIndex], + (variables: DashboardTemplateVariableType[], activeVariableIndex: number) => { + return variables[activeVariableIndex] + }, + ], + allVariablesAreTouched: [ + (s) => [s.variables], + (variables: DashboardTemplateVariableType[]) => { + return variables.every((v) => v.touched) + }, + ], + })), + listeners(({ actions, props, values }) => ({ + possiblyIncrementActiveVariableIndex: () => { + if (props.variables.length > 0 && values.activeVariableIndex < props.variables.length - 1) { + actions.incrementActiveVariableIndex() + } + }, + goToNextUntouchedActiveVariableIndex: () => { + let nextIndex = values.variables.findIndex((v, i) => !v.touched && i > values.activeVariableIndex) + if (nextIndex !== -1) { + 
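// found an untouched variable after the current one, jump straight to it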
actions.setActiveVariableIndex(nextIndex) + return + } + if (nextIndex == -1) { + nextIndex = values.variables.findIndex((v) => !v.touched) + if (nextIndex == -1) { + nextIndex = values.activeVariableIndex + } + } + actions.setActiveVariableIndex(nextIndex) + }, + })), propsChanged(({ actions, props }, oldProps) => { if (props.variables !== oldProps.variables) { actions.setVariables(props.variables) diff --git a/frontend/src/scenes/data-management/database/databaseTableListLogic.ts b/frontend/src/scenes/data-management/database/databaseTableListLogic.ts index 409d9a2c03983..9718eb83c76b9 100644 --- a/frontend/src/scenes/data-management/database/databaseTableListLogic.ts +++ b/frontend/src/scenes/data-management/database/databaseTableListLogic.ts @@ -109,7 +109,9 @@ export const databaseTableListLogic = kea([ } return Object.values(database.tables) - .filter((n): n is DatabaseSchemaDataWarehouseTable => n.type === 'data_warehouse') + .filter( + (n): n is DatabaseSchemaDataWarehouseTable => n.type === 'data_warehouse' || n.type == 'view' + ) .reduce((acc, cur) => { acc[cur.name] = database.tables[cur.name] as DatabaseSchemaDataWarehouseTable return acc diff --git a/frontend/src/scenes/data-model/DataModelScene.tsx b/frontend/src/scenes/data-model/DataModelScene.tsx new file mode 100644 index 0000000000000..b2e953212ccc4 --- /dev/null +++ b/frontend/src/scenes/data-model/DataModelScene.tsx @@ -0,0 +1,27 @@ +import { useValues } from 'kea' +import { ViewLinkModal } from 'scenes/data-warehouse/ViewLinkModal' +import { SceneExport } from 'scenes/sceneTypes' + +import { dataModelSceneLogic } from './dataModelSceneLogic' +import NodeCanvasWithTable from './NodeCanvasWithTable' + +export const scene: SceneExport = { + component: DataModelScene, + logic: dataModelSceneLogic, +} + +export function DataModelScene(): JSX.Element { + const { simplifiedPersonFields, joinedFieldsAsNodes, allNodes } = useValues(dataModelSceneLogic) + + return ( + <> + + + + ) +} diff --git a/frontend/src/scenes/data-model/Node.tsx b/frontend/src/scenes/data-model/Node.tsx new file mode 100644 index 0000000000000..23415711d9aac --- /dev/null +++ b/frontend/src/scenes/data-model/Node.tsx @@ -0,0 +1,20 @@ +interface NodeProps { + pref: (el: HTMLDivElement | null) => void + width?: string + height?: string + className?: string + children: React.ReactNode +} + +function GenericNode({ pref, className = '', children }: NodeProps): JSX.Element { + return ( +
+ <div ref={pref} className={className}>
+     {children}
+ </div>
+ ) +} + +export default GenericNode diff --git a/frontend/src/scenes/data-model/NodeCanvasWithTable.tsx b/frontend/src/scenes/data-model/NodeCanvasWithTable.tsx new file mode 100644 index 0000000000000..21bee4a55f046 --- /dev/null +++ b/frontend/src/scenes/data-model/NodeCanvasWithTable.tsx @@ -0,0 +1,405 @@ +import clsx from 'clsx' +import { useEffect, useRef, useState } from 'react' + +import GenericNode from './Node' +import { FixedField, JoinedField, TableFields } from './TableFields' +import { Edge, Node, NodePosition, NodePositionWithBounds, NodeWithDepth, Position } from './types' + +const VERTICAL_SPACING = 150 +const HORIZONTAL_SPACING = 250 + +// TODO: Refactor this to be done in the backend +const assignDepths = (nodes: Node[]): NodeWithDepth[] => { + const nodeMap: { [id: string]: NodeWithDepth } = {} + + // Initialize all nodes with depth -1 + nodes.forEach((node) => { + nodeMap[node.nodeId] = { ...node, depth: -1 } + }) + + const assignDepthRecursive = (nodeId: string, currentDepth: number): void => { + const node = nodeMap[nodeId] + if (!node) { + return + } // Skip if node doesn't exist or already processed + + node.depth = currentDepth + + // Process leaf nodes + node.leaf.forEach((leafId) => { + if (nodeMap[leafId]) { + assignDepthRecursive(leafId, currentDepth + 1) + } + }) + } + + // Start assigning depths from each unprocessed node + nodes.forEach((node) => { + if (nodeMap[node.nodeId].depth === -1) { + assignDepthRecursive(node.nodeId, 0) + } + }) + + return Object.values(nodeMap) +} + +const calculateNodePositions = (nodesWithDepth: NodeWithDepth[]): NodePosition[] => { + const padding = 50 + // Order nodes by depth + nodesWithDepth.sort((a, b) => a.depth - b.depth) + + // Create a map to store the next available row for each depth + const depthRowMap: { [key: number]: number } = {} + + // Update node positions based on depth + const nodePositions = nodesWithDepth.map((node) => { + const col = node.depth + + // If this is the first node at this depth, initialize the row + if (depthRowMap[col] === undefined) { + depthRowMap[col] = 0 + } + + // Reset row to match root if new column + if (col > 0 && depthRowMap[col] === 0) { + depthRowMap[col] = depthRowMap[0] - 1 || 0 + } + + const row = depthRowMap[col] + + // Update the next available row for this depth + depthRowMap[col] = row + 1 + + return { + ...node, + position: { + x: padding + col * HORIZONTAL_SPACING, + y: padding + row * VERTICAL_SPACING, + }, + } + }) + + return nodePositions +} + +const calculateTablePosition = (nodePositions: NodePosition[]): Position => { + // Find the node with the maximum x position + const farthestNode = nodePositions.reduce((max, node) => (node.position.x > max.position.x ? 
node : max)) + + // Calculate the table position to be slightly to the right of the farthest node + const tablePosition: Position = { + x: farthestNode.position.x + 300, // Add some padding + y: 100, // Fixed y position for the table + } + + return tablePosition +} + +const calculateEdges = (nodeRefs: (HTMLDivElement | null)[], nodes: NodePosition[]): Edge[] => { + const nodes_map = nodes.reduce((acc: Record, node) => { + acc[node.nodeId] = node + return acc + }, {}) + + const dfs = (nodeId: string, visited: Set = new Set(), depth: number = 0): Edge[] => { + if (visited.has(nodeId)) { + return [] + } + visited.add(nodeId) + + const node = nodes_map[nodeId] + if (!node) { + return [] + } + + const nodeRef = nodeRefs.find((ref) => ref?.id === nodeId) + if (!nodeRef) { + return [] + } + + const edges: Edge[] = [] + const fromWithBounds = calculateBound(node, nodeRef) + + for (let i = 0; i < node.leaf.length; i++) { + const leafId = node.leaf[i] + const toNode = nodes_map[leafId] + const toRef = nodeRefs.find((ref) => ref?.id === leafId) + + if (toNode && toRef) { + const toWithBounds = calculateBound(toNode, toRef) + const newEdges = calculateEdgesFromTo(fromWithBounds, toWithBounds) + edges.push(...newEdges) + } + + depth = i > 0 ? depth + 1 : depth + edges.push(...dfs(leafId, visited, depth)) + } + + return edges + } + + const edges: Edge[] = [] + + const visited = new Set() + for (const node of nodes) { + if (!visited.has(node.nodeId)) { + edges.push(...dfs(node.nodeId, visited)) + } + } + + return edges +} + +const calculateBound = (node: NodePosition, ref: HTMLDivElement | null): NodePositionWithBounds => { + if (!ref) { + return { + ...node, + left: null, + right: null, + } + } + + const { x, y } = node.position + const { width, height } = ref.getBoundingClientRect() + return { + ...node, + left: { x, y: y + height / 2 }, + right: { x: x + width, y: y + height / 2 }, + } +} + +const calculateEdgesFromTo = (from: NodePositionWithBounds, to: NodePositionWithBounds): Edge[] => { + if (!from.right || !to.left) { + return [] + } + + const edges = [] + edges.push({ + from: from.right, + to: to.left, + }) + + return edges +} + +interface ScrollableDraggableCanvasProps { + nodes: Node[] + fixedFields: FixedField[] + joinedFields: JoinedField[] + tableName: string +} + +const NodeCanvasWithTable = ({ + nodes, + fixedFields, + joinedFields, + tableName, +}: ScrollableDraggableCanvasProps): JSX.Element => { + const canvasRef = useRef(null) + const [isDragging, setIsDragging] = useState(false) + const [offset, setOffset] = useState({ x: 0, y: 0 }) + const [dragStart, setDragStart] = useState({ x: 0, y: 0 }) + const rowsRefs = useRef<(HTMLDivElement | null)[]>(Array(joinedFields.length).fill(null)) + const nodeRefs = useRef<(HTMLDivElement | null)[]>(Array(nodes.length).fill(null)) + const tableNodeRef = useRef(null) + const [nodePositions, setNodePositions] = useState([]) + const [tablePosition, setTablePosition] = useState({ x: 0, y: 0 }) + const [edges, setEdges] = useState([]) + + useEffect(() => { + const nodesWithDepth = assignDepths(nodes) + const nodePositions = calculateNodePositions(nodesWithDepth) + setNodePositions(nodePositions) + const tablePosition = calculateTablePosition(nodePositions) + setTablePosition(tablePosition) + }, [nodes, fixedFields, joinedFields]) + + useEffect(() => { + const allNodes = [...nodePositions] + // calculated table row positions + rowsRefs.current.forEach((ref) => { + const rect = ref?.getBoundingClientRect() + const nodeRect = 
tableNodeRef.current?.getBoundingClientRect() + + if (!rect) { + return + } + + if (nodeRect && ref) { + allNodes.push({ + nodeId: ref.id, + name: 'Table', + position: { x: tablePosition.x, y: tablePosition.y + (rect.y - nodeRect.y) }, + leaf: [], + depth: -1, + }) + } + }) + + const calculatedEdges = calculateEdges([...nodeRefs.current, ...rowsRefs.current], allNodes) + setEdges(calculatedEdges) + }, [nodePositions, tablePosition]) + + const drawGrid = (ctx: CanvasRenderingContext2D, canvasWidth: number, canvasHeight: number): void => { + ctx.fillStyle = '#000000' + ctx.imageSmoothingEnabled = true + const dotSize = 0.5 + const spacing = 10 + + for (let x = offset.x % spacing; x < canvasWidth; x += spacing) { + for (let y = offset.y % spacing; y < canvasHeight; y += spacing) { + ctx.fillRect(x, y, dotSize, dotSize) + } + } + } + + useEffect(() => { + const canvas = canvasRef.current + + if (canvas) { + const ctx = canvas.getContext('2d') + if (!ctx) { + return + } + const { width, height } = canvas.getBoundingClientRect() + + canvas.width = width + canvas.height = height + + drawGrid(ctx, width, height) + } + + const handleResize = (): void => { + if (canvas) { + const { width, height } = canvas.getBoundingClientRect() + canvas.width = width + canvas.height = height + const ctx = canvas.getContext('2d') + if (ctx) { + drawGrid(ctx, width, height) + } + } + } + + window.addEventListener('resize', handleResize) + + return () => { + window.removeEventListener('resize', handleResize) + } + }, [offset, nodePositions]) + + const handleMouseDown = (e: React.MouseEvent): void => { + setIsDragging(true) + setDragStart({ x: e.clientX - offset.x, y: e.clientY - offset.y }) + } + + const handleMouseMove = (e: React.MouseEvent): void => { + if (!isDragging) { + return + } + const newOffset = { + x: e.clientX - dragStart.x, + y: e.clientY - dragStart.y, + } + setOffset(newOffset) + } + + const handleMouseUp = (): void => { + setIsDragging(false) + } + + return ( +
+ <div
+     className={clsx('relative w-full h-full overflow-hidden', isDragging ? 'cursor-grabbing' : 'cursor-grab')}
+     onMouseDown={handleMouseDown}
+     onMouseMove={handleMouseMove}
+     onMouseUp={handleMouseUp}
+ >
+     <canvas ref={canvasRef} className="w-full h-full" />
+     <svg className="absolute top-0 left-0 w-full h-full pointer-events-none">
+         {edges.map((edge, index) => {
+             const controlPoint1X = edge.from.x + offset.x + (edge.to.x - edge.from.x) / 3
+             const controlPoint1Y = edge.from.y + offset.y
+             const controlPoint2X = edge.to.x + offset.x - (edge.to.x - edge.from.x) / 3
+             const controlPoint2Y = edge.to.y + offset.y
+             return (
+                 <path
+                     key={index}
+                     d={`M ${edge.from.x + offset.x} ${edge.from.y + offset.y} C ${controlPoint1X} ${controlPoint1Y}, ${controlPoint2X} ${controlPoint2Y}, ${edge.to.x + offset.x} ${edge.to.y + offset.y}`}
+                     fill="none"
+                     stroke="currentColor"
+                     strokeWidth="1"
+                 />
+             )
+         })}
+     </svg>
+     {nodePositions.map(({ name, position, nodeId }, idx) => {
+         return (
+             <div
+                 key={nodeId}
+                 className="absolute"
+                 style={{ left: position.x + offset.x, top: position.y + offset.y }}
+             >
+                 <GenericNode
+                     pref={(el) => {
+                         nodeRefs.current[idx] = el
+                         nodeRefs.current[idx]?.setAttribute('id', nodeId)
+                     }}
+                 >
+                     {name}
+                 </GenericNode>
+             </div>
+ ) + })} + +
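{/* the persons table node sits to the right of the farthest graph node, as computed by calculateTablePosition */}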
+ <div className="absolute" style={{ left: tablePosition.x + offset.x, top: tablePosition.y + offset.y }}>
+     <TableFieldNode
+         nodeRef={tableNodeRef}
+         rowsRefs={rowsRefs}
+         fixedFields={fixedFields}
+         joinedFields={joinedFields}
+         tableName={tableName}
+     />
+ </div>
+ </div>
+ ) +} + +export default NodeCanvasWithTable + +interface TableFieldNodeProps { + fixedFields: FixedField[] + joinedFields: JoinedField[] + rowsRefs: React.MutableRefObject<(HTMLDivElement | null)[]> + nodeRef: React.MutableRefObject + tableName: string +} + +function TableFieldNode({ nodeRef, rowsRefs, fixedFields, joinedFields, tableName }: TableFieldNodeProps): JSX.Element { + return ( +
+ <div ref={nodeRef}>
+     <TableFields
+         fixedFields={fixedFields}
+         joinedFields={joinedFields}
+         rowsRefs={rowsRefs}
+         tableName={tableName}
+     />
+ </div>
+ ) +} diff --git a/frontend/src/scenes/data-model/TableFields.tsx b/frontend/src/scenes/data-model/TableFields.tsx new file mode 100644 index 0000000000000..5c697da0f1189 --- /dev/null +++ b/frontend/src/scenes/data-model/TableFields.tsx @@ -0,0 +1,100 @@ +import { IconPlus } from '@posthog/icons' +import { LemonButton, LemonTable } from '@posthog/lemon-ui' +import { useActions } from 'kea' +import { viewLinkLogic } from 'scenes/data-warehouse/viewLinkLogic' +export interface FixedField { + column: string + type: string +} + +export interface JoinedField { + nodeId: string + type: string + table: string +} + +export interface TableFieldsProps { + fixedFields: FixedField[] + joinedFields: JoinedField[] + rowsRefs: React.MutableRefObject<(HTMLDivElement | null)[]> + tableName: string +} + +export function TableFields({ fixedFields, joinedFields, rowsRefs, tableName }: TableFieldsProps): JSX.Element { + const { toggleJoinTableModal, selectSourceTable } = useActions(viewLinkLogic) + + return ( +
+ <div>
+     <div className="pl-4 mt-4">
+         <h3>{tableName}</h3>
+     </div>
+     <div
+         ref={(el) => {
+             rowsRefs.current[joinedFields.length] = el
+             rowsRefs.current[joinedFields.length]?.setAttribute('id', 'schema')
+         }}
+         className="pl-4 mt-4"
+     >
+         <h4>Schema</h4>
+         <LemonTable
+             columns={[
+                 {
+                     key: 'column',
+                     render: (_, { column }) => column,
+                 },
+                 {
+                     key: 'type',
+                     render: (_, { type }) => type,
+                 },
+             ]}
+             dataSource={fixedFields}
+             loading={false}
+             showHeader={false}
+         />
+     </div>
+     <div className="pl-4 mt-4">
+         <div className="flex flex-row justify-between items-center">
+             <h4>Joined Tables</h4>
+             <LemonButton
+                 icon={<IconPlus />}
+                 onClick={() => {
+                     selectSourceTable(tableName)
+                     toggleJoinTableModal()
+                 }}
+             >
+                 Add join
+             </LemonButton>
+         </div>
+         <LemonTable
+             columns={[
+                 {
+                     key: 'nodeId',
+                     render: (_, { nodeId }, idx) => (
+                         <div
+                             ref={(el) => {
+                                 rowsRefs.current[idx] = el
+                                 rowsRefs.current[idx]?.setAttribute('id', nodeId)
+                             }}
+                             className="flex flex-col"
+                         >
+                             {nodeId}
+                         </div>
+                     ),
+                 },
+             ]}
+             loading={false}
+             showHeader={false}
+             dataSource={joinedFields}
+         />
+     </div>
+ </div>
+ ) +} diff --git a/frontend/src/scenes/data-model/dataModelSceneLogic.tsx b/frontend/src/scenes/data-model/dataModelSceneLogic.tsx new file mode 100644 index 0000000000000..c594ebb2b243a --- /dev/null +++ b/frontend/src/scenes/data-model/dataModelSceneLogic.tsx @@ -0,0 +1,97 @@ +import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea' +import { subscriptions } from 'kea-subscriptions' +import api from 'lib/api' +import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' + +import { DatabaseSchemaTable } from '~/queries/schema' +import { DataWarehouseSavedQuery } from '~/types' + +import type { dataModelSceneLogicType } from './dataModelSceneLogicType' +import { Node } from './types' + +export const dataModelSceneLogic = kea([ + path(['scenes', 'data-model', 'dataModelSceneLogic']), + connect(() => ({ + values: [databaseTableListLogic, ['posthogTablesMap', 'viewsMapById']], + })), + actions({ + traverseAncestors: (viewId: DataWarehouseSavedQuery['id']) => ({ viewId }), + setNodes: (nodes: Record) => ({ nodes }), + }), + reducers({ + nodeMap: [ + {} as Record, + { + setNodes: (_, { nodes }) => nodes, + }, + ], + }), + listeners(({ actions, values }) => ({ + traverseAncestors: async ({ viewId }) => { + const result = await api.dataWarehouseSavedQueries.ancestors(viewId) + + result.ancestors.forEach((ancestor) => { + actions.setNodes({ + ...values.nodeMap, + [ancestor]: { + nodeId: ancestor, + name: values.viewsMapById[ancestor]?.name || ancestor, + leaf: [...(values.nodeMap[ancestor]?.leaf || []), viewId], + }, + }) + }) + }, + })), + selectors({ + personFields: [(s) => [s.posthogTablesMap], (posthogTablesMap) => posthogTablesMap['persons']?.fields || []], + simplifiedPersonFields: [ + (s) => [s.personFields], + (personFields) => + Object.entries(personFields) + .filter(([_, data]) => data.type != 'view') + .map(([column, data]) => ({ column, type: data.type })), + ], + joinedFields: [ + (s) => [s.personFields], + (personFields) => + Object.entries(personFields) + .filter(([_, data]) => data.type == 'view') + .map(([_, data]) => data), + ], + joinedFieldsAsNodes: [ + (s) => [s.joinedFields], + (joinedFields) => + joinedFields.map((field) => ({ + nodeId: field.name, + type: 'view', + table: field.name, + })) || [], + ], + allNodes: [ + (s) => [s.nodeMap], + (nodeMap) => [ + { + nodeId: 'posthog', + name: 'PostHog', + leaf: ['schema'], + }, + ...Object.values(nodeMap), + ], + ], + }), + subscriptions(({ actions, values }) => ({ + joinedFields: (joinedFields) => { + joinedFields.forEach((field: DatabaseSchemaTable) => { + actions.setNodes({ + ...values.nodeMap, + [field.id]: { + nodeId: field.id, + name: values.viewsMapById[field.id]?.name || field.id, + leaf: [field.name], + }, + }) + actions.traverseAncestors(field.id) + }) + }, + })), +]) diff --git a/frontend/src/scenes/data-model/types.ts b/frontend/src/scenes/data-model/types.ts new file mode 100644 index 0000000000000..f774137452f04 --- /dev/null +++ b/frontend/src/scenes/data-model/types.ts @@ -0,0 +1,28 @@ +export interface Position { + x: number + y: number +} + +export interface Node { + nodeId: string + name: string + leaf: string[] +} + +export interface NodeWithDepth extends Node { + depth: number +} + +export interface NodePosition extends NodeWithDepth { + position: Position +} + +export interface Edge { + from: Position + to: Position +} + +export interface NodePositionWithBounds extends NodePosition { + left: Position | null + right: Position | null +} diff --git 
a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx index f385f82c5b402..5f4b9fbc628aa 100644 --- a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx @@ -567,6 +567,45 @@ export const SOURCE_DETAILS: Record = { ], caption: 'Select an existing Salesforce account to link to PostHog or create a new connection', }, + Vitally: { + name: 'Vitally', + fields: [ + { + name: 'secret_token', + label: 'Secret token', + type: 'text', + required: true, + placeholder: 'sk_live_...', + }, + { + type: 'select', + name: 'region', + label: 'Vitally region', + required: true, + defaultValue: 'EU', + options: [ + { + label: 'EU', + value: 'EU', + }, + { + label: 'US', + value: 'US', + fields: [ + { + name: 'subdomain', + label: 'Vitally subdomain', + type: 'text', + required: true, + placeholder: '', + }, + ], + }, + ], + }, + ], + caption: '', + }, } export const buildKeaFormDefaultFromSourceDetails = ( diff --git a/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx b/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx index d18657892fd22..4d1fc0f20b4cd 100644 --- a/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx +++ b/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx @@ -14,6 +14,7 @@ import IconSalesforce from 'public/services/salesforce.png' import IconSnowflake from 'public/services/snowflake.png' import IconMSSQL from 'public/services/sql-azure.png' import IconStripe from 'public/services/stripe.png' +import IconVitally from 'public/services/vitally.png' import IconZendesk from 'public/services/zendesk.png' import { urls } from 'scenes/urls' @@ -189,6 +190,7 @@ export function RenderDataWarehouseSourceIcon({ azure: Iconazure, Salesforce: IconSalesforce, MSSQL: IconMSSQL, + Vitally: IconVitally, }[type] return ( @@ -203,7 +205,7 @@ export function RenderDataWarehouseSourceIcon({ } > - {type} + {type}
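Note on the Vitally source config above: it is the first SOURCE_DETAILS entry where a select option carries its own nested fields (choosing the US region reveals a required subdomain input), so any code that walks a source's fields to seed form defaults has to recurse into each option's fields as well. Below is a minimal sketch of that traversal, with assumed field shapes and a hypothetical buildDefaults helper; it is not the actual buildKeaFormDefaultFromSourceDetails implementation:

    interface SourceFieldOption {
        label: string
        value: string
        fields?: SourceFieldConfig[] // only present on options that need extra input, e.g. Vitally's US region
    }

    interface SourceFieldConfig {
        name: string
        type: 'text' | 'select'
        required?: boolean
        defaultValue?: string
        options?: SourceFieldOption[]
    }

    // Seed a flat map of form defaults, recursing into option-scoped fields
    // so the form's shape stays stable when the user switches options.
    function buildDefaults(fields: SourceFieldConfig[]): Record<string, string> {
        const defaults: Record<string, string> = {}
        for (const field of fields) {
            defaults[field.name] = field.defaultValue ?? ''
            for (const option of field.options ?? []) {
                if (option.fields) {
                    Object.assign(defaults, buildDefaults(option.fields))
                }
            }
        }
        return defaults
    }

For the Vitally entry this would yield { secret_token: '', region: 'EU', subdomain: '' }, matching the fields declared above.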
diff --git a/frontend/src/scenes/error-tracking/AssigneeSelect.tsx b/frontend/src/scenes/error-tracking/AssigneeSelect.tsx new file mode 100644 index 0000000000000..c45ab1afac27d --- /dev/null +++ b/frontend/src/scenes/error-tracking/AssigneeSelect.tsx @@ -0,0 +1,45 @@ +import { IconPerson } from '@posthog/icons' +import { LemonButton, LemonButtonProps, ProfilePicture } from '@posthog/lemon-ui' +import { MemberSelect } from 'lib/components/MemberSelect' +import { fullName } from 'lib/utils' + +import { ErrorTrackingGroup } from '../../queries/schema' + +export const AssigneeSelect = ({ + assignee, + onChange, + showName = false, + ...buttonProps +}: { + assignee: ErrorTrackingGroup['assignee'] + onChange: (userId: number | null) => void + showName?: boolean +} & Partial>): JSX.Element => { + return ( + { + const assigneeId = user?.id || null + onChange(assigneeId) + }} + > + {(user) => ( + + ) : ( + + ) + } + sideIcon={null} + {...buttonProps} + > + {showName ? {user ? fullName(user) : 'Unassigned'} : null} + + )} + + ) +} diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingGroupScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingGroupScene.tsx index 0ba40d7b258ab..715b917e9a206 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingGroupScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingGroupScene.tsx @@ -1,10 +1,16 @@ import './ErrorTracking.scss' -import { LemonDivider } from '@posthog/lemon-ui' +import { LemonButton, LemonDivider } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { PageHeader } from 'lib/components/PageHeader' import { base64Decode } from 'lib/utils' +import { useEffect } from 'react' import { SceneExport } from 'scenes/sceneTypes' import { SessionPlayerModal } from 'scenes/session-recordings/player/modal/SessionPlayerModal' +import { ErrorTrackingGroup } from '~/queries/schema' + +import { AssigneeSelect } from './AssigneeSelect' import ErrorTrackingFilters from './ErrorTrackingFilters' import { errorTrackingGroupSceneLogic } from './errorTrackingGroupSceneLogic' import { OverviewTab } from './groups/OverviewTab' @@ -17,9 +23,62 @@ export const scene: SceneExport = { }), } +const STATUS_LABEL: Record = { + active: 'Active', + archived: 'Archived', + resolved: 'Resolved', + pending_release: 'Pending release', +} + export function ErrorTrackingGroupScene(): JSX.Element { + const { group, groupLoading } = useValues(errorTrackingGroupSceneLogic) + const { updateGroup, loadGroup } = useActions(errorTrackingGroupSceneLogic) + + useEffect(() => { + // don't like doing this but scene logics do not unmount after being loaded + // so this refreshes the group on each page visit in case any changes occurred + if (!groupLoading) { + loadGroup() + } + }, []) + return ( <> + + updateGroup({ assignee })} + type="secondary" + showName + /> +
+ updateGroup({ status: 'archived' })}> + Archive + + updateGroup({ status: 'resolved' })}> + Resolve + +
+
+ ) : ( + updateGroup({ status: 'active' })} + tooltip="Mark as active" + > + {STATUS_LABEL[group.status]} + + ) + ) : ( + false + ) + } + /> diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx index 0b0a6db0a9fb9..2024b835a8188 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx @@ -1,10 +1,8 @@ import { TZLabel } from '@posthog/apps-common' -import { IconPerson } from '@posthog/icons' -import { LemonButton, LemonCheckbox, LemonDivider, LemonSegmentedButton, ProfilePicture } from '@posthog/lemon-ui' +import { LemonButton, LemonCheckbox, LemonDivider, LemonSegmentedButton } from '@posthog/lemon-ui' import clsx from 'clsx' import { BindLogic, useActions, useValues } from 'kea' import { FeedbackNotice } from 'lib/components/FeedbackNotice' -import { MemberSelect } from 'lib/components/MemberSelect' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' import { SceneExport } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' @@ -15,6 +13,7 @@ import { ErrorTrackingGroup } from '~/queries/schema' import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleComponent } from '~/queries/types' import { InsightLogicProps } from '~/types' +import { AssigneeSelect } from './AssigneeSelect' import { errorTrackingDataNodeLogic } from './errorTrackingDataNodeLogic' import ErrorTrackingFilters from './ErrorTrackingFilters' import { errorTrackingLogic } from './errorTrackingLogic' @@ -40,8 +39,10 @@ export function ErrorTrackingScene(): JSX.Element { render: CustomGroupTitleColumn, }, occurrences: { align: 'center' }, + sessions: { align: 'center' }, + users: { align: 'center' }, volume: { renderTitle: CustomVolumeColumnHeader }, - assignee: { render: AssigneeColumn, align: 'center' }, + assignee: { render: AssigneeColumn }, }, showOpenEditorButton: false, insightProps: insightProps, @@ -151,26 +152,11 @@ const AssigneeColumn: QueryContextColumnComponent = (props) => { const record = props.record as ErrorTrackingGroup return ( - { - const assigneeId = user?.id || null - assignGroup(props.recordIndex, assigneeId) - }} - > - {(user) => ( - - ) : ( - - ) - } - /> - )} - +
+ <AssigneeSelect
+     assignee={record.assignee}
+     onChange={(assigneeId) => assignGroup(props.recordIndex, assigneeId)}
+ />
) } diff --git a/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts index af9ba29f04255..4279f0456f80a 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts @@ -1,4 +1,4 @@ -import { actions, afterMount, connect, kea, listeners, path, props, reducers, selectors } from 'kea' +import { actions, connect, kea, listeners, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import { actionToUrl, router, urlToAction } from 'kea-router' import api from 'lib/api' @@ -45,6 +45,7 @@ export const errorTrackingGroupSceneLogic = kea ({ tab }), setActiveEventUUID: (uuid: ErrorTrackingEvent['uuid']) => ({ uuid }), + updateGroup: (group: Partial>) => ({ group }), }), reducers(() => ({ @@ -73,13 +74,20 @@ export const errorTrackingGroupSceneLogic = kea { + const response = await api.errorTracking.update(props.fingerprint, group) + return { ...values.group, ...response } + }, }, ], events: [ @@ -166,8 +174,4 @@ export const errorTrackingGroupSceneLogic = kea { - actions.loadGroup() - }), ]) diff --git a/frontend/src/scenes/funnels/funnelUtils.ts b/frontend/src/scenes/funnels/funnelUtils.ts index da23089d9aad5..931935415cc3a 100644 --- a/frontend/src/scenes/funnels/funnelUtils.ts +++ b/frontend/src/scenes/funnels/funnelUtils.ts @@ -389,7 +389,7 @@ export function flattenedStepsByBreakdown( breakdown_value: 'Baseline', })), conversionRates: { - total: (lastStep?.count ?? 0) / (baseStep?.count ?? 1), + total: (lastStep?.count || 0) / (baseStep?.count || 1), }, }) } diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 9e3b25bc2bddc..0c268dee77e6e 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -1,8 +1,14 @@ import './EmptyStates.scss' -// eslint-disable-next-line no-restricted-imports -import { PlusCircleOutlined, ThunderboltFilled } from '@ant-design/icons' -import { IconArchive, IconInfo, IconPlus, IconWarning } from '@posthog/icons' +import { + IconArchive, + IconInfo, + IconPieChart, + IconPlus, + IconPlusSmall, + IconPlusSquare, + IconWarning, +} from '@posthog/icons' import { LemonButton } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { AnimationType } from 'lib/animations/animations' @@ -69,7 +75,7 @@ function SamplingLink({ insightProps }: { insightProps: InsightLogicProps }): JS }) }} > - {suggestedSamplingPercentage}% sampling + {suggestedSamplingPercentage}% sampling ) @@ -341,10 +347,10 @@ export function FunnelSingleStepState({ actionable = true }: FunnelSingleStepSta
- +

Add another step!

-

+

You’re almost there! Funnels require at least two steps before calculating. {actionable && ' Once you have two steps defined, additional changes will recalculate automatically.'} @@ -480,7 +486,7 @@ export function SavedInsightsEmptyState(): JSX.Element { } + icon={} className="add-insight-button" > New insight diff --git a/frontend/src/scenes/max/Max.tsx b/frontend/src/scenes/max/Max.tsx index 594ca3344aed3..65909db171a5a 100644 --- a/frontend/src/scenes/max/Max.tsx +++ b/frontend/src/scenes/max/Max.tsx @@ -3,13 +3,13 @@ import './Max.scss' import { LemonButton, LemonInput, Spinner } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' -import { HedgehogBuddyStatic } from 'lib/components/HedgehogBuddy/HedgehogBuddyRender' +import { HedgehogBuddy } from 'lib/components/HedgehogBuddy/HedgehogBuddy' +import { hedgehogBuddyLogic } from 'lib/components/HedgehogBuddy/hedgehogBuddyLogic' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { uuid } from 'lib/utils' import React, { useState } from 'react' import { SceneExport } from 'scenes/sceneTypes' -import { userLogic } from 'scenes/userLogic' import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' import { Query } from '~/queries/Query/Query' @@ -41,8 +41,8 @@ function Message({ } export function Max(): JSX.Element | null { - const { user } = useValues(userLogic) const { featureFlags } = useValues(featureFlagLogic) + const { hedgehogConfig } = useValues(hedgehogBuddyLogic) const logic = maxLogic({ sessionId: uuid(), @@ -118,12 +118,16 @@ export function Max(): JSX.Element | null { )}

-
- + actor.setAnimation('wave')} + onActorLoaded={(actor) => actor.setAnimation('wave')} />
- +

+ For each action below, select an element on your site that indicates when that action is
+ taken, or enter a custom event name that you'll send using{' '}
+ <code>posthog.capture()</code>{' '}
+ (no need to send it now).

+ Create dashboard diff --git a/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateVariables.tsx b/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateVariables.tsx new file mode 100644 index 0000000000000..e22f822eedcfe --- /dev/null +++ b/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateVariables.tsx @@ -0,0 +1,186 @@ +import { IconCheckCircle, IconInfo, IconTrash } from '@posthog/icons' +import { LemonBanner, LemonButton, LemonCollapse, LemonInput, LemonLabel } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { useEffect, useState } from 'react' +import { dashboardTemplateVariablesLogic } from 'scenes/dashboard/dashboardTemplateVariablesLogic' +import { newDashboardLogic } from 'scenes/dashboard/newDashboardLogic' + +import { DashboardTemplateVariableType } from '~/types' + +function VariableSelector({ + variable, + hasSelectedSite, +}: { + variable: DashboardTemplateVariableType + hasSelectedSite: boolean +}): JSX.Element { + const { activeDashboardTemplate } = useValues(newDashboardLogic) + const theDashboardTemplateVariablesLogic = dashboardTemplateVariablesLogic({ + variables: activeDashboardTemplate?.variables || [], + }) + const { setVariable, resetVariable, goToNextUntouchedActiveVariableIndex, incrementActiveVariableIndex } = + useActions(theDashboardTemplateVariablesLogic) + const { allVariablesAreTouched, variables, activeVariableIndex } = useValues(theDashboardTemplateVariablesLogic) + const [customEventName, setCustomEventName] = useState(null) + const [showCustomEventField, setShowCustomEventField] = useState(false) + + const FALLBACK_EVENT = { + id: '$other_event', + math: 'dau', + type: 'events', + } + + return ( +
+
+

+ {variable.description} +

+
+ {variable.touched && !customEventName && ( +
+
+ {' '} + Selected +

.md-invite-button

+
+
+ } + type="tertiary" + size="small" + onClick={() => resetVariable(variable.id)} + /> +
+
+ )} + {showCustomEventField && ( +
+ Custom event name +

+ Set the name that you'll use for a custom event (e.g. a backend event) instead of selecting an
+ event from your site. You can change this later if needed.

+
+ { + if (v) { + setCustomEventName(v) + setVariable(variable.name, { + events: [{ id: v, math: 'dau', type: 'events' }], + }) + } else { + setCustomEventName(null) + resetVariable(variable.id) + } + }} + onBlur={() => { + if (customEventName) { + setVariable(variable.name, { + events: [{ id: customEventName, math: 'dau', type: 'events' }], + }) + } else { + resetVariable(variable.id) + setShowCustomEventField(false) + } + }} + /> +
+ } + type="tertiary" + size="small" + onClick={() => { + resetVariable(variable.id) + setCustomEventName(null) + setShowCustomEventField(false) + }} + /> +
+
+
+ )} + {!hasSelectedSite ? ( + Please select a site to continue. + ) : ( +
+ {variable.touched ? ( + <> + {!allVariablesAreTouched || + (allVariablesAreTouched && variables.length !== activeVariableIndex + 1) ? ( + + !allVariablesAreTouched + ? goToNextUntouchedActiveVariableIndex() + : variables.length !== activeVariableIndex + 1 + ? incrementActiveVariableIndex() + : null + } + > + Continue + + ) : null} + + ) : ( +
+ { + setShowCustomEventField(false) + setVariable(variable.name, { events: [FALLBACK_EVENT] }) + }} + > + Select from site + + setShowCustomEventField(true)}> + Use custom event + +
+ )} +
+ )} +
+ ) +} + +export function DashboardTemplateVariables({ hasSelectedSite }: { hasSelectedSite: boolean }): JSX.Element { + const { activeDashboardTemplate } = useValues(newDashboardLogic) + const theDashboardTemplateVariablesLogic = dashboardTemplateVariablesLogic({ + variables: activeDashboardTemplate?.variables || [], + }) + const { variables, activeVariableIndex } = useValues(theDashboardTemplateVariablesLogic) + const { setVariables, setActiveVariableIndex } = useActions(theDashboardTemplateVariablesLogic) + + // TODO: onboarding-dashboard-templates: this is a hack, I'm not sure why it's not set properly initially. + useEffect(() => { + setVariables(activeDashboardTemplate?.variables || []) + }, [activeDashboardTemplate]) + + return ( +
+ ({ + key: v.id, + header: ( +
+ {v.name} + {v.touched && } +
+ ), + content: , + className: 'p-4 bg-white', + onHeaderClick: () => { + setActiveVariableIndex(i) + }, + }))} + embedded + size="small" + /> +
+ ) +} diff --git a/frontend/src/scenes/persons/RelatedFeatureFlags.tsx b/frontend/src/scenes/persons/RelatedFeatureFlags.tsx index 2b673f455c386..40599b218a591 100644 --- a/frontend/src/scenes/persons/RelatedFeatureFlags.tsx +++ b/frontend/src/scenes/persons/RelatedFeatureFlags.tsx @@ -1,4 +1,5 @@ -import { LemonInput, LemonSelect, LemonSnack, LemonTable, LemonTag } from '@posthog/lemon-ui' +import { IconInfo } from '@posthog/icons' +import { LemonInput, LemonSelect, LemonSnack, LemonTable, LemonTag, Link, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { LemonTableColumns } from 'lib/lemon-ui/LemonTable' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' @@ -89,7 +90,33 @@ export function RelatedFeatureFlags({ distinctId, groups }: Props): JSX.Element }, }, { - title: 'Match evaluation', + title: ( +
+
Match evaluation
+ +
+ This column simulates the feature flag evaluation based on the selected distinct ID, + current properties, and groups associated with the user. If the actual flag value + differs, it could be due to different inputs used during evaluation. +
+
+ If you are using local flag evaluation, you must ensure that you provide any person + properties, groups, or group properties used to evaluate the release conditions of + the flag. Read more in the{' '} + + documentation. + +
+
+ } + closeDelayMs={200} + > + + +
+ ), dataIndex: 'evaluation', width: 150, render: function Render(_, featureFlag: RelatedFeatureFlag) { diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx index dacfed655a513..2c46f62d4bf4b 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx @@ -36,6 +36,7 @@ import { PipelineTab, PropertyFilterType, PropertyGroupFilter, + PropertyGroupFilterValue, } from '~/types' import { EmailTemplate } from './email-templater/emailTemplaterLogic' @@ -448,10 +449,14 @@ export const hogFunctionConfigurationLogic = kea [s.configuration], (configuration): TrendsQuery => { - const properties: PropertyGroupFilter = { + const seriesProperties: PropertyGroupFilterValue = { type: FilterLogicalOperator.Or, values: [], } + const properties: PropertyGroupFilter = { + type: FilterLogicalOperator.And, + values: [seriesProperties], + } for (const event of configuration.filters?.events ?? []) { const eventProperties: AnyPropertyFilter[] = [...(event.properties ?? [])] if (event.id) { @@ -466,7 +471,7 @@ export const hogFunctionConfigurationLogic = kea 0) { + const globalProperties: PropertyGroupFilterValue = { + type: FilterLogicalOperator.And, + values: [], + } + for (const property of configuration.filters?.properties ?? []) { + globalProperties.values.push(property as AnyPropertyFilter) + } + properties.values.push(globalProperties) + } return { kind: NodeKind.TrendsQuery, diff --git a/frontend/src/scenes/saved-insights/SavedInsights.scss b/frontend/src/scenes/saved-insights/SavedInsights.scss index 768d883c174eb..bc9f2a281daaf 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.scss +++ b/frontend/src/scenes/saved-insights/SavedInsights.scss @@ -42,11 +42,6 @@ line-height: 1.7rem; color: var(--muted); } - - button.add-insight-button { - height: 50px; - padding: 0 3rem; - } } } } diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 1ef05e3b953aa..e1c68fb381ef7 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -4,6 +4,7 @@ import { IconBrackets, IconCorrelationAnalysis, IconCursor, + IconFlask, IconFunnels, IconGraph, IconHogQL, @@ -304,6 +305,12 @@ export const QUERY_TYPES_METADATA: Record = { icon: IconPieChart, inMenu: true, }, + [NodeKind.WebExternalClicksTableQuery]: { + name: 'External click urls', + description: 'View clicks on external links', + icon: IconPieChart, + inMenu: true, + }, [NodeKind.HogQuery]: { name: 'Hog', description: 'Hog query', @@ -328,6 +335,12 @@ export const QUERY_TYPES_METADATA: Record = { icon: IconVideoCamera, inMenu: false, }, + [NodeKind.ExperimentResultQuery]: { + name: 'Experiment Result', + description: 'View experiment result', + icon: IconFlask, + inMenu: false, + }, } export const INSIGHT_TYPE_OPTIONS: LemonSelectOptions = [ diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts index adddb012a10b3..5624d8b2028d1 100644 --- a/frontend/src/scenes/sceneTypes.ts +++ b/frontend/src/scenes/sceneTypes.ts @@ -43,6 +43,7 @@ export enum Scene { Survey = 'Survey', SurveyTemplates = 'SurveyTemplates', DataWarehouse = 'DataWarehouse', + DataModel = 'DataModel', DataWarehouseExternal = 'DataWarehouseExternal', DataWarehouseTable = 'DataWarehouseTable', 
DataWarehouseRedirect = 'DataWarehouseRedirect', @@ -125,7 +126,7 @@ export interface SceneConfig { * If `plain`, there's no navigation present, and the scene has no padding. * @default 'app' */ - layout?: 'app' | 'app-raw' | 'app-container' | 'plain' + layout?: 'app' | 'app-raw' | 'app-canvas' | 'app-container' | 'plain' /** Hides project notice (ProjectNotice.tsx). */ hideProjectNotice?: boolean /** Hides billing notice (BillingAlertsV2.tsx). */ diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index 7581e424bd2fd..036efe7ae5940 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -215,6 +215,12 @@ export const sceneConfigurations: Record = { name: 'New survey', defaultDocsPath: '/docs/surveys/creating-surveys', }, + [Scene.DataModel]: { + projectBased: true, + name: 'Visualize person schema', + defaultDocsPath: '/docs/data-datawarehouse', + layout: 'app-canvas', + }, [Scene.DataWarehouse]: { projectBased: true, name: 'Data warehouse', @@ -514,6 +520,7 @@ export const routes: Record = { [urls.surveys()]: Scene.Surveys, [urls.survey(':id')]: Scene.Survey, [urls.surveyTemplates()]: Scene.SurveyTemplates, + [urls.dataModel()]: Scene.DataModel, [urls.dataWarehouse()]: Scene.DataWarehouse, [urls.dataWarehouseView(':id')]: Scene.DataWarehouse, [urls.dataWarehouseTable()]: Scene.DataWarehouseTable, diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx index a58bc3e739ead..b677c8fec155e 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx @@ -170,7 +170,7 @@ const MenuActions = (): JSX.Element => { useActions(sessionRecordingPlayerLogic) const { fetchSimilarRecordings } = useActions(sessionRecordingDataLogic(logicProps)) - const hasMobileExport = useFeatureFlag('SESSION_REPLAY_EXPORT_MOBILE_DATA') + const hasMobileExport = useFeatureFlag('SESSION_REPLAY_EXPORT_MOBILE_DATA') || !!window.IMPERSONATED_SESSION // keep the hook call unconditional (rules of hooks); impersonated sessions always get the export option const hasSimilarRecordings = useFeatureFlag('REPLAY_SIMILAR_RECORDINGS') const onDelete = (): void => { diff --git a/frontend/src/scenes/session-recordings/player/__mocks__/encoded-snapshot-data.ts b/frontend/src/scenes/session-recordings/player/__mocks__/encoded-snapshot-data.ts new file mode 100644 index 0000000000000..f522be7364dd4 --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/__mocks__/encoded-snapshot-data.ts @@ -0,0 +1,4 @@ +export const encodedWebSnapshotData: string[] = [ + '{"window_id":"0191c366-dd75-708c-bb41-c0d5bd2bb0dc","data":[{"type":4,"data":{"href":"http://localhost:8000/project/1","width":719,"height":914},"timestamp":1725560859629},{"type":3,"data":{"source":2,"type":0,"id":320,"x":21.41059112548828,"y":28.776042938232422},"timestamp":1725560861395},{"type":3,"data":{"source":2,"type":2,"id":320,"x":21,"y":28,"pointerType":0},"timestamp":1725560861398},{"type":3,"data":{"source":0,"texts":[],"attributes":[{"id":59,"attributes":{"class":"Navbar3000"}},{"id":313,"attributes":{"class":"Navbar3000__overlay"}}],"removes":[],"adds":[]},"timestamp":1725560861402}]}', +
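+ // both lines decode to the same four rrweb events, so this fixture parses to eight snapshots in total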
'{"window_id":"0191c366-dd75-708c-bb41-c0d5bd2bb0dc","data":[{"type":4,"data":{"href":"http://localhost:8000/project/1","width":719,"height":914},"timestamp":1725560859629},{"type":3,"data":{"source":2,"type":0,"id":320,"x":21.41059112548828,"y":28.776042938232422},"timestamp":1725560861395},{"type":3,"data":{"source":2,"type":2,"id":320,"x":21,"y":28,"pointerType":0},"timestamp":1725560861398},{"type":3,"data":{"source":0,"texts":[],"attributes":[{"id":59,"attributes":{"class":"Navbar3000"}},{"id":313,"attributes":{"class":"Navbar3000__overlay"}}],"removes":[],"adds":[]},"timestamp":1725560861402}]}', +] diff --git a/frontend/src/scenes/session-recordings/player/__snapshots__/sessionRecordingDataLogic.test.ts.snap b/frontend/src/scenes/session-recordings/player/__snapshots__/sessionRecordingDataLogic.test.ts.snap index 6fda01f36439f..1dfae7ab19d64 100644 --- a/frontend/src/scenes/session-recordings/player/__snapshots__/sessionRecordingDataLogic.test.ts.snap +++ b/frontend/src/scenes/session-recordings/player/__snapshots__/sessionRecordingDataLogic.test.ts.snap @@ -2078,3 +2078,193 @@ exports[`sessionRecordingDataLogic deduplicateSnapshots should match snapshot 1` }, ] `; + +exports[`sessionRecordingDataLogic snapshot parsing handles data with unparseable lines 1`] = ` +[ + { + "data": { + "height": 914, + "href": "http://localhost:8000/project/1", + "width": 719, + }, + "timestamp": 1725560859629, + "type": 4, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "id": 320, + "source": 2, + "type": 0, + "x": 21.41059112548828, + "y": 28.776042938232422, + }, + "timestamp": 1725560861395, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "id": 320, + "pointerType": 0, + "source": 2, + "type": 2, + "x": 21, + "y": 28, + }, + "timestamp": 1725560861398, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "adds": [], + "attributes": [ + { + "attributes": { + "class": "Navbar3000", + }, + "id": 59, + }, + { + "attributes": { + "class": "Navbar3000__overlay", + }, + "id": 313, + }, + ], + "removes": [], + "source": 0, + "texts": [], + }, + "timestamp": 1725560861402, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, +] +`; + +exports[`sessionRecordingDataLogic snapshot parsing handles normal web data 1`] = ` +[ + { + "data": { + "height": 914, + "href": "http://localhost:8000/project/1", + "width": 719, + }, + "timestamp": 1725560859629, + "type": 4, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "id": 320, + "source": 2, + "type": 0, + "x": 21.41059112548828, + "y": 28.776042938232422, + }, + "timestamp": 1725560861395, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "id": 320, + "pointerType": 0, + "source": 2, + "type": 2, + "x": 21, + "y": 28, + }, + "timestamp": 1725560861398, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "adds": [], + "attributes": [ + { + "attributes": { + "class": "Navbar3000", + }, + "id": 59, + }, + { + "attributes": { + "class": "Navbar3000__overlay", + }, + "id": 313, + }, + ], + "removes": [], + "source": 0, + "texts": [], + }, + "timestamp": 1725560861402, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "height": 914, + "href": "http://localhost:8000/project/1", + "width": 719, + }, + "timestamp": 1725560859629, + "type": 4, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + 
"data": { + "id": 320, + "source": 2, + "type": 0, + "x": 21.41059112548828, + "y": 28.776042938232422, + }, + "timestamp": 1725560861395, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "id": 320, + "pointerType": 0, + "source": 2, + "type": 2, + "x": 21, + "y": 28, + }, + "timestamp": 1725560861398, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, + { + "data": { + "adds": [], + "attributes": [ + { + "attributes": { + "class": "Navbar3000", + }, + "id": 59, + }, + { + "attributes": { + "class": "Navbar3000__overlay", + }, + "id": 313, + }, + ], + "removes": [], + "source": 0, + "texts": [], + }, + "timestamp": 1725560861402, + "type": 3, + "windowId": "0191c366-dd75-708c-bb41-c0d5bd2bb0dc", + }, +] +`; diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx index 2b079dba9583d..80f17633c1fc3 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx @@ -1,10 +1,12 @@ import { BindLogic, useActions, useValues } from 'kea' +import { Dayjs } from 'lib/dayjs' import useIsHovering from 'lib/hooks/useIsHovering' import { colonDelimitedDuration } from 'lib/utils' import { memo, MutableRefObject, useEffect, useRef, useState } from 'react' import { useDebouncedCallback } from 'use-debounce' import { PlayerFrame } from '../PlayerFrame' +import { TimestampFormat } from '../playerSettingsLogic' import { sessionRecordingPlayerLogic, SessionRecordingPlayerLogicProps, @@ -18,6 +20,8 @@ export type PlayerSeekbarPreviewProps = { maxMs: number seekBarRef: MutableRefObject activeMs: number | null + timestampFormat: TimestampFormat + startTime: Dayjs | null } const PlayerSeekbarPreviewFrame = ({ @@ -27,7 +31,7 @@ const PlayerSeekbarPreviewFrame = ({ isVisible, }: { percentage: number; isVisible: boolean } & Omit< PlayerSeekbarPreviewProps, - 'seekBarRef' | 'activeMs' + 'seekBarRef' | 'activeMs' | 'timestampFormat' | 'startTime' >): JSX.Element | null => { const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic) @@ -66,11 +70,28 @@ const PlayerSeekbarPreviewFrame = ({ ) } -function _PlayerSeekbarPreview({ minMs, maxMs, seekBarRef, activeMs }: PlayerSeekbarPreviewProps): JSX.Element { +function _PlayerSeekbarPreview({ + minMs, + maxMs, + seekBarRef, + activeMs, + timestampFormat, + startTime, +}: PlayerSeekbarPreviewProps): JSX.Element { const [percentage, setPercentage] = useState(0) const ref = useRef(null) const fixedUnits = maxMs / 1000 > 3600 ? 3 : 2 - const content = colonDelimitedDuration(minMs / 1000 + ((maxMs - minMs) / 1000) * percentage, fixedUnits) + + const progressionSeconds = ((maxMs - minMs) / 1000) * percentage + + const absoluteTime = startTime?.add(progressionSeconds, 'seconds') + + const content = + timestampFormat === TimestampFormat.Relative + ? colonDelimitedDuration(minMs / 1000 + progressionSeconds, fixedUnits) + : absoluteTime + ? (timestampFormat === TimestampFormat.UTC ? 
absoluteTime?.tz('UTC') : absoluteTime)?.format('HH:mm:ss') + : '00:00:00' const isHovering = useIsHovering(seekBarRef) diff --git a/frontend/src/scenes/session-recordings/player/controller/Seekbar.tsx b/frontend/src/scenes/session-recordings/player/controller/Seekbar.tsx index 5b70034663f04..4274979dcfe65 100644 --- a/frontend/src/scenes/session-recordings/player/controller/Seekbar.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/Seekbar.tsx @@ -7,6 +7,7 @@ import { useEffect, useRef } from 'react' import { RecordingSegment } from '~/types' import { playerInspectorLogic } from '../inspector/playerInspectorLogic' +import { playerSettingsLogic } from '../playerSettingsLogic' import { sessionRecordingDataLogic } from '../sessionRecordingDataLogic' import { sessionRecordingPlayerLogic } from '../sessionRecordingPlayerLogic' import { PlayerSeekbarPreview } from './PlayerSeekbarPreview' @@ -18,6 +19,7 @@ export function Seekbar(): JSX.Element { const { seekToTime } = useActions(sessionRecordingPlayerLogic) const { seekbarItems } = useValues(playerInspectorLogic(logicProps)) const { endTimeMs, thumbLeftPos, bufferPercent, isScrubbing } = useValues(seekbarLogic(logicProps)) + const { timestampFormat } = useValues(playerSettingsLogic) const { handleDown, setSlider, setThumb } = useActions(seekbarLogic(logicProps)) const { sessionPlayerData, sessionPlayerMetaData } = useValues(sessionRecordingDataLogic(logicProps)) @@ -84,6 +86,8 @@ export function Seekbar(): JSX.Element { activeMs={ sessionPlayerMetaData?.active_seconds ? sessionPlayerMetaData.active_seconds * 1000 : null } + timestampFormat={timestampFormat} + startTime={sessionPlayerData.start} />
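+ {/* timestampFormat and startTime let the preview show absolute (UTC or local) times as well as the relative offset */}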
diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts index 13279a313cf97..7a00e7b49b323 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts @@ -2,8 +2,10 @@ import { expectLogic } from 'kea-test-utils' import { api, MOCK_TEAM_ID } from 'lib/api.mock' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { convertSnapshotsByWindowId } from 'scenes/session-recordings/__mocks__/recording_snapshots' +import { encodedWebSnapshotData } from 'scenes/session-recordings/player/__mocks__/encoded-snapshot-data' import { deduplicateSnapshots, + parseEncodedSnapshots, sessionRecordingDataLogic, } from 'scenes/session-recordings/player/sessionRecordingDataLogic' import { teamLogic } from 'scenes/teamLogic' @@ -404,4 +406,30 @@ describe('sessionRecordingDataLogic', () => { ]) }) }) + + describe('snapshot parsing', () => { + const sessionId = '12345' + const numberOfParsedLinesInData = 8 + it('handles normal web data', async () => { + const parsed = await parseEncodedSnapshots(encodedWebSnapshotData, sessionId, false) + expect(parsed.length).toEqual(numberOfParsedLinesInData) + expect(parsed).toMatchSnapshot() + }) + + it('handles data with unparseable lines', async () => { + const parsed = await parseEncodedSnapshots( + encodedWebSnapshotData.map((line, index) => { + return index === 0 ? line.substring(0, line.length / 2) : line + }), + sessionId, + false + ) + + // unparseable lines are not returned + expect(encodedWebSnapshotData.length).toEqual(2) + expect(parsed.length).toEqual(numberOfParsedLinesInData / 2) + + expect(parsed).toMatchSnapshot() + }) + }) }) diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 953a180473e94..cd503f914a29b 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -1,6 +1,6 @@ import posthogEE from '@posthog/ee/exports' -import { customEvent, EventType, eventWithTime } from '@rrweb/types' -import { captureException } from '@sentry/react' +import { customEvent, EventType, eventWithTime, fullSnapshotEvent } from '@rrweb/types' +import { captureException, captureMessage } from '@sentry/react' import { actions, afterMount, @@ -21,11 +21,13 @@ import api from 'lib/api' import { FEATURE_FLAGS } from 'lib/constants' import { Dayjs, dayjs } from 'lib/dayjs' import { featureFlagLogic, FeatureFlagsSet } from 'lib/logic/featureFlagLogic' +import { isObject } from 'lib/utils' import { chainToElements } from 'lib/utils/elements-chain' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import posthog from 'posthog-js' -import { NodeKind } from '~/queries/schema' +import { HogQLQuery, NodeKind } from '~/queries/schema' +import { hogql } from '~/queries/utils' import { AnyPropertyFilter, EncodedRecordingSnapshot, @@ -62,6 +64,54 @@ function isRecordingSnapshot(x: unknown): x is RecordingSnapshot { return typeof x === 'object' && x !== null && 'type' in x && 'timestamp' in x } +/* + there was a bug in the mobile SDK that didn't consistently send a meta event with a full snapshot.
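+ (a meta event carries the href and viewport width/height that the player needs before it can render anything)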
+ the rrweb player hides itself until it has seen a meta event 🤷 + but we can patch a meta event into the recording data to make it work */ +function patchMetaEventIntoMobileData(parsedLines: RecordingSnapshot[]): RecordingSnapshot[] { + let fullSnapshotIndex: number = -1 + let metaIndex: number = -1 + try { + fullSnapshotIndex = parsedLines.findIndex((l) => l.type === EventType.FullSnapshot) + metaIndex = parsedLines.findIndex((l) => l.type === EventType.Meta) + + // if there is a full snapshot but no meta event, patch a meta event into the snapshot data + if (fullSnapshotIndex > -1 && metaIndex === -1) { + const fullSnapshot = parsedLines[fullSnapshotIndex] as RecordingSnapshot & fullSnapshotEvent & eventWithTime + // a full snapshot (particularly from the mobile transformer) has a relatively fixed structure, + // but the types exposed by rrweb don't quite cover what we need, so... + const mainNode = fullSnapshot.data.node as any + const targetNode = mainNode.childNodes[1].childNodes[1].childNodes[0] + const { width, height } = targetNode.attributes + const metaEvent: RecordingSnapshot = { + windowId: fullSnapshot.windowId, + type: EventType.Meta, + timestamp: fullSnapshot.timestamp, + data: { + href: getHrefFromSnapshot(fullSnapshot) || '', + width, + height, + }, + } + parsedLines.splice(fullSnapshotIndex, 0, metaEvent) + } + } catch (e) { + captureException(e, { + tags: { feature: 'session-recording-missing-meta-patching' }, + extra: { fullSnapshotIndex, metaIndex }, + }) + } + + return parsedLines +} + +function hasAnyWireframes(snapshotData: Record<string, any>[]): boolean { + return snapshotData.some((d) => { + return isObject(d.data) && 'wireframes' in d.data + }) +} + export const parseEncodedSnapshots = async ( items: (RecordingSnapshot | EncodedRecordingSnapshot | string)[], sessionId: string, @@ -71,9 +121,12 @@ if (!postHogEEModule) { postHogEEModule = await posthogEE() } + const lineCount = items.length const unparseableLines: string[] = [] - const parsedLines = items.flatMap((l) => { + let isMobileSnapshots = false + + const parsedLines: RecordingSnapshot[] = items.flatMap((l) => { if (!l) { // blob files have an empty line at the end return [] @@ -82,6 +135,10 @@ const snapshotLine = typeof l === 'string' ? (JSON.parse(l) as EncodedRecordingSnapshot) : l const snapshotData = isRecordingSnapshot(snapshotLine) ? [snapshotLine] : snapshotLine['data'] + if (!isMobileSnapshots) { + isMobileSnapshots = hasAnyWireframes(snapshotData) + } + return snapshotData.map((d: unknown) => { const snap = withMobileTransformer ? postHogEEModule?.mobileReplay?.transformEventToWeb(d) || (d as eventWithTime) @@ -117,11 +174,13 @@ }) } - return parsedLines + return isMobileSnapshots ? patchMetaEventIntoMobileData(parsedLines) : parsedLines } -const getHrefFromSnapshot = (snapshot: RecordingSnapshot): string | undefined => { - return (snapshot.data as any)?.href || (snapshot.data as any)?.payload?.href +const getHrefFromSnapshot = (snapshot: unknown): string | undefined => { + return isObject(snapshot) && 'data' in snapshot + ?
(snapshot.data as any)?.href || (snapshot.data as any)?.payload?.href + : undefined } /* @@ -486,23 +545,34 @@ export const sessionRecordingDataLogic = kea([ return values.sessionEventsData } - const { person } = values.sessionPlayerData + if (!event.id) { + captureMessage('event id not available for matching', { + tags: { feature: 'session-recording-load-full-event-data' }, + extra: { event }, + }) + return values.sessionEventsData + } let loadedProperties: Record = existingEvent.properties - // TODO: Move this to an optimised HogQL query when available... + try { - const res: any = await api.query({ - kind: 'EventsQuery', - select: ['properties', 'timestamp'], - orderBy: ['timestamp ASC'], - limit: 100, - personId: String(person?.id), - after: dayjs(event.timestamp).subtract(1000, 'ms').format(), - before: dayjs(event.timestamp).add(1000, 'ms').format(), - event: existingEvent.event, - }) + const query: HogQLQuery = { + kind: NodeKind.HogQLQuery, + query: hogql`SELECT properties, uuid + FROM events + WHERE timestamp > ${dayjs(event.timestamp).subtract(1000, 'ms')} + AND timestamp < ${dayjs(event.timestamp).add(1000, 'ms')} + AND event = ${event.event} + AND uuid = ${event.id}`, + } + const response = await api.query(query) + if (response.error) { + throw new Error(response.error) + } - const result = res.results.find((x: any) => x[1] === event.timestamp) + const result = response.results.find((x: any) => { + return x[1] === event.id + }) if (result) { loadedProperties = JSON.parse(result[0]) @@ -512,7 +582,9 @@ export const sessionRecordingDataLogic = kea([ } catch (e) { // NOTE: This is not ideal but should happen so rarely that it is tolerable. existingEvent.fullyLoaded = true - captureException(e) + captureException(e, { + tags: { feature: 'session-recording-load-full-event-data' }, + }) } // here we map the events list because we want the result to be a new instance to trigger downstream recalculation diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index dd56272db691a..7bede32af5ad9 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -1063,7 +1063,7 @@ export const sessionRecordingPlayerLogic = kea( cache.hasInitialized = false document.removeEventListener('fullscreenchange', cache.fullScreenListener) cache.pausedMediaElements = [] - values.player?.replayer?.pause() + values.player?.replayer?.destroy() actions.setPlayer(null) cache.unmountConsoleWarns?.() diff --git a/frontend/src/scenes/settings/SettingsScene.stories.tsx b/frontend/src/scenes/settings/SettingsScene.stories.tsx index 0aab7a2bf98a3..8ebdd09f15b14 100644 --- a/frontend/src/scenes/settings/SettingsScene.stories.tsx +++ b/frontend/src/scenes/settings/SettingsScene.stories.tsx @@ -1,6 +1,6 @@ import { Meta, StoryFn } from '@storybook/react' import { router } from 'kea-router' -import { MOCK_DEFAULT_USER } from 'lib/api.mock' +import { MOCK_DEFAULT_TEAM, MOCK_DEFAULT_USER } from 'lib/api.mock' import { useEffect } from 'react' import { App } from 'scenes/App' import { urls } from 'scenes/urls' @@ -27,6 +27,13 @@ const meta: Meta = { }, '/api/projects/:id/integrations': { results: [] }, }, + patch: { + '/api/projects/:id': async (req, res, ctx) => { + // bounce the setting back as is + const newTeamSettings = { ...MOCK_DEFAULT_TEAM, ...(await req.json()) } + return 
res(ctx.json(newTeamSettings)) + }, + }, }), ], } @@ -77,6 +84,16 @@ SettingsOrganization.parameters = { testOptions: { waitForSelector: '.Settings__sections button' }, } +export const SettingsWebVitals: StoryFn = () => { + useEffect(() => { + router.actions.push(urls.settings('project-autocapture', 'web-vitals-autocapture')) + }, []) + return <App /> +} +SettingsWebVitals.parameters = { + testOptions: { waitForSelector: '.Settings__sections button' }, +} + function TimeSensitiveSettings(props: { has_password?: boolean saml_available?: boolean diff --git a/frontend/src/scenes/settings/project/AutocaptureSettings.tsx b/frontend/src/scenes/settings/project/AutocaptureSettings.tsx index 78e30ff29e0fd..c3879157f131f 100644 --- a/frontend/src/scenes/settings/project/AutocaptureSettings.tsx +++ b/frontend/src/scenes/settings/project/AutocaptureSettings.tsx @@ -1,13 +1,58 @@ -import { LemonSwitch, LemonTag, LemonTextArea, Link } from '@posthog/lemon-ui' +import { LemonDivider, LemonSwitch, LemonTag, LemonTextArea, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' +import { SupportedWebVitalsMetrics } from 'posthog-js' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' import { autocaptureExceptionsLogic } from './autocaptureExceptionsLogic' +function WebVitalsAllowedMetricSwitch({ metric }: { metric: SupportedWebVitalsMetrics }): JSX.Element { + const { userLoading } = useValues(userLogic) + const { currentTeam } = useValues(teamLogic) + const { updateCurrentTeam } = useActions(teamLogic) + + return ( + <LemonSwitch + label={metric} + bordered + disabled={userLoading} + checked={(currentTeam?.autocapture_web_vitals_allowed_metrics || ['FCP', 'CLS', 'INP', 'LCP']).includes(metric)} + onChange={(checked) => { + if (!currentTeam) { + // shouldn't ever get here without a team, but we certainly can't edit it if it's not there + return + } + + const without = ( + currentTeam?.autocapture_web_vitals_allowed_metrics || ['FCP', 'CLS', 'INP', 'LCP'] + )?.filter((allowedMetric) => allowedMetric !== metric) + if (checked) { + updateCurrentTeam({ + autocapture_web_vitals_allowed_metrics: [...without, metric], + }) + } else { + updateCurrentTeam({ + autocapture_web_vitals_allowed_metrics: [...without], + }) + } + }} + /> + ) +} + export function AutocaptureSettings(): JSX.Element { const { userLoading } = useValues(userLogic) const { currentTeam } = useValues(teamLogic) @@ -131,6 +176,14 @@ export function WebVitalsAutocaptureSettings(): JSX.Element { } bordered />
+ <LemonDivider />
+ <p>
+     You can choose which metrics to capture. By default, we capture all metrics.
+ </p>
+ <WebVitalsAllowedMetricSwitch metric="CLS" />
+ <WebVitalsAllowedMetricSwitch metric="FCP" />
+ <WebVitalsAllowedMetricSwitch metric="INP" />
+ <WebVitalsAllowedMetricSwitch metric="LCP" />
) } diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx index 191ed1742db06..a8afb5e6c2ac4 100644 --- a/frontend/src/scenes/surveys/SurveyEdit.tsx +++ b/frontend/src/scenes/surveys/SurveyEdit.tsx @@ -132,15 +132,7 @@ export default function SurveyEdit(): JSX.Element { description="Automatically appears when PostHog JS is installed" value={SurveyType.Popover} > -
+
diff --git a/frontend/src/scenes/teamActivityDescriber.tsx b/frontend/src/scenes/teamActivityDescriber.tsx index ec96d71ceb24b..dd282151377eb 100644 --- a/frontend/src/scenes/teamActivityDescriber.tsx +++ b/frontend/src/scenes/teamActivityDescriber.tsx @@ -158,6 +158,11 @@ const teamActionsMapping: Record< autocapture_web_vitals_opt_in(change: ActivityChange | undefined): ChangeMapping | null { return { description: [<>{change?.after ? 'enabled' : 'disabled'} web vitals autocapture</>] } }, + autocapture_web_vitals_allowed_metrics(change: ActivityChange | undefined): ChangeMapping | null { + const after = change?.after + const metricsList = Array.isArray(after) ? after.join(', ') : 'CLS, FCP, INP, and LCP' + return { description: [<>set allowed web vitals autocapture metrics to {metricsList}</>] } + }, autocapture_opt_out(change: ActivityChange | undefined): ChangeMapping | null { return { description: [<>{change?.after ? 'opted in to' : 'opted out of'} autocapture</>] } }, diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 996d25c4b622f..e4e6e79afa03a 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -150,6 +150,7 @@ export const urls = { /** @param id A UUID or 'new'. ':id' for routing. */ survey: (id: string): string => `/surveys/${id}`, surveyTemplates: (): string => '/survey_templates', + dataModel: (): string => '/data-model', dataWarehouse: (query?: string | Record<string, any>): string => combineUrl(`/data-warehouse`, {}, query ? { q: typeof query === 'string' ? query : JSON.stringify(query) } : {}) .url, diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx index 4ce8a66fa7514..9d58fc82eb116 100644 --- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx +++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx @@ -190,6 +190,11 @@ export const webAnalyticsDataTableQueryContext: QueryContext = { render: NumericCell, align: 'right', }, + clicks: { + title: 'Clicks', + render: NumericCell, + align: 'right', + }, visitors: { title: 'Visitors', render: NumericCell, @@ -509,6 +514,33 @@ export const WebGoalsTile = ({
) } + +export const WebExternalClicksTile = ({ + query, + insightProps, +}: { + query: DataTableNode + insightProps: InsightLogicProps +}): JSX.Element | null => { + const { shouldStripQueryParams } = useValues(webAnalyticsLogic) + const { setShouldStripQueryParams } = useActions(webAnalyticsLogic) + return ( +
+ <div>
+ <LemonSwitch
+ label="Strip query parameters"
+ checked={shouldStripQueryParams}
+ onChange={setShouldStripQueryParams}
+ bordered
+ />
+ <Query query={query} readOnly={true} context={{ ...webAnalyticsDataTableQueryContext, insightProps }} />
+ </div>
+ ) +} + export const WebQuery = ({ query, showIntervalSelect, @@ -530,6 +562,9 @@ export const WebQuery = ({ /> ) } + if (query.kind === NodeKind.DataTableNode && query.source.kind === NodeKind.WebExternalClicksTableQuery) { + return <WebExternalClicksTile query={query} insightProps={insightProps} /> + } if (query.kind === NodeKind.InsightVizNode) { return } else if (query.kind === NodeKind.DataTableNode && query.source.kind === NodeKind.WebGoalsQuery) { diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx index 2db3b94cefed9..18977f12d9f7d 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx @@ -237,10 +237,18 @@ export const webAnalyticsLogic = kea([ setGeographyTab: (tab: string) => ({ tab }), setDates: (dateFrom: string | null, dateTo: string | null) => ({ dateFrom, dateTo }), setInterval: (interval: IntervalType) => ({ interval }), + setDatesAndInterval: (dateFrom: string | null, dateTo: string | null, interval: IntervalType) => ({ + dateFrom, + dateTo, + interval, + }), setIsPathCleaningEnabled: (isPathCleaningEnabled: boolean) => ({ isPathCleaningEnabled }), setShouldFilterTestAccounts: (shouldFilterTestAccounts: boolean) => ({ shouldFilterTestAccounts, }), + setShouldStripQueryParams: (shouldStripQueryParams: boolean) => ({ + shouldStripQueryParams, + }), setStateFromUrl: (state: { filters: WebAnalyticsPropertyFilters dateFrom: string | null @@ -421,6 +429,17 @@ export const webAnalyticsLogic = kea([ interval, } }, + setDatesAndInterval: (_, { dateTo, dateFrom, interval }) => { + if (!dateFrom && !dateTo) { + dateFrom = initialDateFrom + dateTo = initialDateTo + } + return { + dateTo, + dateFrom, + interval: interval || getDefaultInterval(dateFrom, dateTo), + } + }, setStateFromUrl: (_, { state: { dateTo, dateFrom, interval } }) => { if (!dateFrom && !dateTo) { dateFrom = initialDateFrom dateTo = initialDateTo @@ -441,6 +460,13 @@ export const webAnalyticsLogic = kea([ setShouldFilterTestAccounts: (_, { shouldFilterTestAccounts }) => shouldFilterTestAccounts, }, ], + shouldStripQueryParams: [ + false as boolean, + { persist: true }, + { + setShouldStripQueryParams: (_, { shouldStripQueryParams }) => shouldStripQueryParams, + }, + ], }), selectors(({ actions, values }) => ({ graphsTab: [(s) => [s._graphsTab], (graphsTab: string | null) => graphsTab || GraphsTab.UNIQUE_USERS], @@ -470,6 +496,7 @@ export const webAnalyticsLogic = kea([ () => values.isGreaterThanMd, () => values.shouldShowGeographyTile, () => values.featureFlags, + () => values.shouldStripQueryParams, ], ( webAnalyticsFilters, _statusCheck, isGreaterThanMd, shouldShowGeographyTile, - featureFlags + featureFlags, + shouldStripQueryParams ): WebDashboardTile[] => { const dateRange = { date_from: dateFrom, @@ -725,20 +753,19 @@ export const webAnalyticsLogic = kea([ featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_LAST_CLICK] ?
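+ // when the last-click flag is enabled this tab switches to the dedicated external clicks query, which can strip query params from the clicked URLs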
{ id: PathTab.EXIT_CLICK, - title: 'Exit clicks', - linkText: 'Exit clicks', + title: 'Outbound link clicks', + linkText: 'Outbound clicks', query: { full: true, kind: NodeKind.DataTableNode, source: { - kind: NodeKind.WebStatsTableQuery, + kind: NodeKind.WebExternalClicksTableQuery, properties: webAnalyticsFilters, - breakdownBy: WebStatsBreakdown.ExitClick, dateRange, - includeScrollDepth: false, sampling, limit: 10, filterTestAccounts, + stripQueryParams: shouldStripQueryParams, }, embedded: false, }, @@ -1541,11 +1568,8 @@ export const webAnalyticsLogic = kea([ if (parsedFilters) { actions.setWebAnalyticsFilters(parsedFilters) } - if (date_from || date_to) { - actions.setDates(date_from, date_to) - } - if (interval) { - actions.setInterval(interval) + if (date_from || date_to || interval) { + actions.setDatesAndInterval(date_from, date_to, interval) } if (device_tab) { actions.setDeviceTab(device_tab) diff --git a/frontend/src/toolbar/bar/toolbarLogic.ts b/frontend/src/toolbar/bar/toolbarLogic.ts index 20148d0ca3b40..2176e5650cfee 100644 --- a/frontend/src/toolbar/bar/toolbarLogic.ts +++ b/frontend/src/toolbar/bar/toolbarLogic.ts @@ -274,7 +274,7 @@ export const toolbarLogic = kea([ setVisibleMenu: ({ visibleMenu }) => { if (visibleMenu === 'heatmap') { actions.enableHeatmap() - values.hedgehogActor?.setAnimation('heatmaps') + values.hedgehogActor?.setOnFire(1) } else if (visibleMenu === 'actions') { actions.showButtonActions() values.hedgehogActor?.setAnimation('action') diff --git a/frontend/src/toolbar/hedgehog/HedgehogButton.tsx b/frontend/src/toolbar/hedgehog/HedgehogButton.tsx index d0082012e0367..22403fa431ffb 100644 --- a/frontend/src/toolbar/hedgehog/HedgehogButton.tsx +++ b/frontend/src/toolbar/hedgehog/HedgehogButton.tsx @@ -15,7 +15,7 @@ export function HedgehogButton(): JSX.Element { useEffect(() => { if (heatmapEnabled) { - hedgehogActor?.setAnimation('heatmaps') + hedgehogActor?.setOnFire(1) } }, [heatmapEnabled]) diff --git a/frontend/src/types.ts b/frontend/src/types.ts index a48a4d0438f4e..037f150978738 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -22,7 +22,7 @@ import { } from 'lib/constants' import { Dayjs, dayjs } from 'lib/dayjs' import { PopoverProps } from 'lib/lemon-ui/Popover/Popover' -import type { PostHog } from 'posthog-js' +import type { PostHog, SupportedWebVitalsMetrics } from 'posthog-js' import { Layout } from 'react-grid-layout' import { LogLevel } from 'rrweb' import { BehavioralFilterKey, BehavioralFilterType } from 'scenes/cohorts/CohortFilters/types' @@ -295,9 +295,12 @@ export interface MinimalHedgehogConfig { accessories: string[] } +export type HedgehogSkin = 'default' | 'spiderhog' + export interface HedgehogConfig extends MinimalHedgehogConfig { enabled: boolean color: HedgehogColorOptions | null + skin?: HedgehogSkin accessories: string[] walking_enabled: boolean interactions_enabled: boolean @@ -489,6 +492,7 @@ export interface TeamType extends TeamBasicType { session_replay_config: { record_canvas?: boolean; ai_config?: SessionRecordingAIConfig } | undefined | null autocapture_exceptions_opt_in: boolean autocapture_web_vitals_opt_in?: boolean + autocapture_web_vitals_allowed_metrics?: SupportedWebVitalsMetrics[] surveys_opt_in?: boolean heatmaps_opt_in?: boolean autocapture_exceptions_errors_to_ignore: string[] @@ -1800,6 +1804,7 @@ export interface DashboardTemplateVariableType { type: 'event' default: Record required: boolean + touched?: boolean } export type DashboardLayoutSize = 'sm' | 'xs' @@ -3864,6 +3869,7 @@ 
export const externalDataSources = [ 'Zendesk', 'Snowflake', 'Salesforce', + 'Vitally', ] as const export type ExternalDataSourceType = (typeof externalDataSources)[number] diff --git a/hogvm/__tests__/__snapshots__/arrays.hoge b/hogvm/__tests__/__snapshots__/arrays.hoge index 4afcc23ad537a..37cfb21b32bde 100644 --- a/hogvm/__tests__/__snapshots__/arrays.hoge +++ b/hogvm/__tests__/__snapshots__/arrays.hoge @@ -24,4 +24,7 @@ 35, 32, "------", 2, "print", 1, 35, 36, 2, 33, 0, 2, "has", 2, 2, "print", 1, 35, 36, 2, 33, 2, 2, "has", 2, 2, "print", 1, 35, 36, 2, 32, "banana", 2, "has", 2, 2, "print", 1, 35, 32, "banananas", 32, "banana", 2, "has", 2, 2, "print", 1, 35, 32, "banananas", 32, "foo", 2, "has", 2, 2, "print", 1, 35, 32, "1", 32, "2", 43, 2, 32, "1", 2, "has", -2, 2, "print", 1, 35, 35, 35, 35] +2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 33, 1, 2, "indexOf", 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, +33, 2, 2, "indexOf", 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 33, 3, 2, "indexOf", 2, 2, "print", 1, 35, 33, 1, +33, 2, 33, 3, 43, 3, 33, 4, 2, "indexOf", 2, 2, "print", 1, 35, 52, "lambda", 1, 0, 6, 33, 2, 36, 0, 13, 38, 53, 0, 33, +1, 33, 2, 33, 3, 33, 4, 33, 5, 43, 5, 2, "arrayCount", 2, 2, "print", 1, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/arrays.stdout b/hogvm/__tests__/__snapshots__/arrays.stdout index d582bb4c45219..84e5d1b160aff 100644 --- a/hogvm/__tests__/__snapshots__/arrays.stdout +++ b/hogvm/__tests__/__snapshots__/arrays.stdout @@ -46,3 +46,8 @@ false false false true +1 +2 +3 +0 +3 diff --git a/hogvm/__tests__/__snapshots__/stl.hoge b/hogvm/__tests__/__snapshots__/stl.hoge index 3f842a7d4156f..35cad3352f54c 100644 --- a/hogvm/__tests__/__snapshots__/stl.hoge +++ b/hogvm/__tests__/__snapshots__/stl.hoge @@ -14,13 +14,14 @@ "print", 1, 35, 32, "string", 2, "empty", 1, 2, "print", 1, 35, 32, "0", 2, "empty", 1, 2, "print", 1, 35, 43, 0, 2, "empty", 1, 2, "print", 1, 35, 42, 0, 2, "empty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "empty", 1, 2, "print", 1, 35, 33, 0, 2, "tuple", 1, 2, "empty", 1, 2, "print", 1, 35, 33, 1, 33, 2, 2, "tuple", 2, 2, "empty", 1, 2, "print", 1, 35, -32, "", 2, "print", 1, 35, 32, "-- notEmpty --", 2, "print", 1, 35, 31, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 0, 2, -"notEmpty", 1, 2, "print", 1, 35, 33, 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, -1, 2, "notEmpty", 1, 2, "print", 1, -35, 34, 0.0, 2, "notEmpty", 1, 2, "print", 1, 35, 34, 0.01, 2, "notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "notEmpty", -1, 2, "print", 1, 35, 32, "string", 2, "notEmpty", 1, 2, "print", 1, 35, 32, "0", 2, "notEmpty", 1, 2, "print", 1, 35, -43, 0, 2, "notEmpty", 1, 2, "print", 1, 35, 42, 0, 2, "notEmpty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "notEmpty", 1, -2, "print", 1, 35, 33, 0, 2, "tuple", 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 1, 33, 2, 2, "tuple", 2, 2, -"notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- replaceAll, replaceOne --", 2, "print", 1, 35, 32, -"hello world", 32, "l", 32, "L", 2, "replaceAll", 3, 2, "print", 1, 35, 32, "hello world", 32, "l", 32, "L", 2, -"replaceOne", 3, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- generateUUIDv4 --", 2, "print", 1, 35, 2, -"generateUUIDv4", 0, 2, "length", 1, 2, "print", 1, 35] +29, 2, "empty", 1, 2, "print", 1, 35, 30, 2, "empty", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, +"-- notEmpty --", 2, "print", 1, 35, 31, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 0, 2, "notEmpty", 1, 2, "print", 1, +35, 33, 1, 2, "notEmpty", 1, 2, 
"print", 1, 35, 33, -1, 2, "notEmpty", 1, 2, "print", 1, 35, 34, 0.0, 2, "notEmpty", 1, +2, "print", 1, 35, 34, 0.01, 2, "notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "notEmpty", 1, 2, "print", 1, 35, 32, +"string", 2, "notEmpty", 1, 2, "print", 1, 35, 32, "0", 2, "notEmpty", 1, 2, "print", 1, 35, 43, 0, 2, "notEmpty", 1, 2, +"print", 1, 35, 42, 0, 2, "notEmpty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 0, +2, "tuple", 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 1, 33, 2, 2, "tuple", 2, 2, "notEmpty", 1, 2, "print", 1, 35, +29, 2, "notEmpty", 1, 2, "print", 1, 35, 30, 2, "notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, +"-- replaceAll, replaceOne --", 2, "print", 1, 35, 32, "hello world", 32, "l", 32, "L", 2, "replaceAll", 3, 2, "print", +1, 35, 32, "hello world", 32, "l", 32, "L", 2, "replaceOne", 3, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, +"-- generateUUIDv4 --", 2, "print", 1, 35, 2, "generateUUIDv4", 0, 2, "length", 1, 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/stl.stdout b/hogvm/__tests__/__snapshots__/stl.stdout index d8c2ecb8d346a..f80a819686adc 100644 --- a/hogvm/__tests__/__snapshots__/stl.stdout +++ b/hogvm/__tests__/__snapshots__/stl.stdout @@ -17,10 +17,10 @@ tom & jerry -- empty -- true -true false false -true +false +false false true false @@ -30,13 +30,15 @@ true true false false +false +false -- notEmpty -- false -false true true -false +true +true true false true @@ -46,6 +48,8 @@ false false true true +true +true -- replaceAll, replaceOne -- heLLo worLd diff --git a/hogvm/__tests__/__snapshots__/strings.hoge b/hogvm/__tests__/__snapshots__/strings.hoge index 0c3118d3a043e..2143350e53df1 100644 --- a/hogvm/__tests__/__snapshots__/strings.hoge +++ b/hogvm/__tests__/__snapshots__/strings.hoge @@ -8,4 +8,9 @@ "print", 1, 35, 32, "banana", 32, "n", 2, "like", 2, 2, "print", 1, 35, 32, "banana", 32, "naan", 2, "like", 2, 2, "print", 1, 35, 32, "banana", 32, "N", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "n", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "naan", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "N", 2, "notLike", 2, 2, -"print", 1, 35, 32, "banana", 32, "NO", 2, "notILike", 2, 2, "print", 1, 35] +"print", 1, 35, 32, "banana", 32, "NO", 2, "notILike", 2, 2, "print", 1, 35, 32, "abc", 32, "a", 2, "position", 2, 2, +"print", 1, 35, 32, "abc", 32, "b", 2, "position", 2, 2, "print", 1, 35, 32, "abc", 32, "c", 2, "position", 2, 2, +"print", 1, 35, 32, "abc", 32, "d", 2, "position", 2, 2, "print", 1, 35, 32, "AbC", 32, "a", 2, +"positionCaseInsensitive", 2, 2, "print", 1, 35, 32, "AbC", 32, "b", 2, "positionCaseInsensitive", 2, 2, "print", 1, 35, +32, "AbC", 32, "c", 2, "positionCaseInsensitive", 2, 2, "print", 1, 35, 32, "AbC", 32, "d", 2, +"positionCaseInsensitive", 2, 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/strings.stdout b/hogvm/__tests__/__snapshots__/strings.stdout index 4c5bfc110b5b7..57808971dd3f3 100644 --- a/hogvm/__tests__/__snapshots__/strings.stdout +++ b/hogvm/__tests__/__snapshots__/strings.stdout @@ -16,3 +16,11 @@ true false true true +1 +2 +3 +0 +1 +2 +3 +0 diff --git a/hogvm/__tests__/arrays.hog b/hogvm/__tests__/arrays.hog index 6f934807566d9..67d07c451a1ff 100644 --- a/hogvm/__tests__/arrays.hog +++ b/hogvm/__tests__/arrays.hog @@ -64,3 +64,10 @@ print(has(arr, 'banana')) print(has('banananas', 'banana')) print(has('banananas', 'foo')) print(has(['1', '2'], '1')) + +print(indexOf([1,2,3], 1)) // 1 +print(indexOf([1,2,3], 2)) // 2 
+print(indexOf([1,2,3], 3)) // 3 +print(indexOf([1,2,3], 4)) // 0 + +print(arrayCount(x -> x > 2, [1,2,3,4,5])) // 3 \ No newline at end of file diff --git a/hogvm/__tests__/stl.hog b/hogvm/__tests__/stl.hog index 3fb041c261677..2343492d90d71 100644 --- a/hogvm/__tests__/stl.hog +++ b/hogvm/__tests__/stl.hog @@ -30,6 +30,8 @@ print(empty({})) print(empty(tuple())) print(empty(tuple(0))) print(empty(tuple(1,2))) +print(empty(true)) +print(empty(false)) print('') print('-- notEmpty --') print(notEmpty(null)) @@ -46,6 +48,8 @@ print(notEmpty({})) print(notEmpty(tuple())) print(notEmpty(tuple(0))) print(notEmpty(tuple(1,2))) +print(notEmpty(true)) +print(notEmpty(false)) print('') print('-- replaceAll, replaceOne --') print(replaceAll('hello world', 'l', 'L')) diff --git a/hogvm/__tests__/strings.hog b/hogvm/__tests__/strings.hog index 322e08bdc51ef..e8d0eab7b8e93 100644 --- a/hogvm/__tests__/strings.hog +++ b/hogvm/__tests__/strings.hog @@ -16,3 +16,11 @@ print(ilike('banana', 'n')) print(ilike('banana', 'naan')) print(notLike('banana', 'N')) print(notILike('banana', 'NO')) +print(position('abc', 'a')) // 1 +print(position('abc', 'b')) // 2 +print(position('abc', 'c')) // 3 +print(position('abc', 'd')) // 0 +print(positionCaseInsensitive('AbC', 'a')) // 1 +print(positionCaseInsensitive('AbC', 'b')) // 2 +print(positionCaseInsensitive('AbC', 'c')) // 3 +print(positionCaseInsensitive('AbC', 'd')) // 0 diff --git a/hogvm/python/stl/__init__.py b/hogvm/python/stl/__init__.py index feaae8a899aeb..9312d6d5afbb8 100644 --- a/hogvm/python/stl/__init__.py +++ b/hogvm/python/stl/__init__.py @@ -100,6 +100,12 @@ def ifNull(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], return args[1] +def empty(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float): + if isinstance(args[0], bool) or isinstance(args[0], int) or isinstance(args[0], float): + return False + return not bool(args[0]) + + def sleep(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float): time.sleep(args[0]) return None @@ -367,8 +373,10 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] "toFloat": STLFunction(fn=toFloat, minArgs=1, maxArgs=1), "ifNull": STLFunction(fn=ifNull, minArgs=2, maxArgs=2), "length": STLFunction(fn=lambda args, team, stdout, timeout: len(args[0]), minArgs=1, maxArgs=1), - "empty": STLFunction(fn=lambda args, team, stdout, timeout: not bool(args[0]), minArgs=1, maxArgs=1), - "notEmpty": STLFunction(fn=lambda args, team, stdout, timeout: bool(args[0]), minArgs=1, maxArgs=1), + "empty": STLFunction(fn=empty, minArgs=1, maxArgs=1), + "notEmpty": STLFunction( + fn=lambda args, team, stdout, timeout: not empty(args, team, stdout, timeout), minArgs=1, maxArgs=1 + ), "tuple": STLFunction(fn=lambda args, team, stdout, timeout: tuple(args), minArgs=0, maxArgs=None), "lower": STLFunction(fn=lambda args, team, stdout, timeout: args[0].lower(), minArgs=1, maxArgs=1), "upper": STLFunction(fn=lambda args, team, stdout, timeout: args[0].upper(), minArgs=1, maxArgs=1), @@ -386,6 +394,20 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] "replaceAll": STLFunction( fn=lambda args, team, stdout, timeout: args[0].replace(args[1], args[2]), minArgs=3, maxArgs=3 ), + "position": STLFunction( + fn=lambda args, team, stdout, timeout: (args[0].index(str(args[1])) + 1) + if isinstance(args[0], str) and str(args[1]) in args[0] + else 0, + minArgs=2, + maxArgs=2, + ), + "positionCaseInsensitive": 
STLFunction( + fn=lambda args, team, stdout, timeout: (args[0].lower().index(str(args[1]).lower()) + 1) + if isinstance(args[0], str) and str(args[1]).lower() in args[0].lower() + else 0, + minArgs=2, + maxArgs=2, + ), "trim": STLFunction(fn=trim, minArgs=1, maxArgs=2), "trimLeft": STLFunction(fn=trimLeft, minArgs=1, maxArgs=2), "trimRight": STLFunction(fn=trimRight, minArgs=1, maxArgs=2), @@ -398,6 +420,13 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] ), "keys": STLFunction(fn=keys, minArgs=1, maxArgs=1), "values": STLFunction(fn=values, minArgs=1, maxArgs=1), + "indexOf": STLFunction( + fn=lambda args, team, stdout, timeout: (args[0].index(args[1]) + 1) + if isinstance(args[0], list) and args[1] in args[0] + else 0, + minArgs=2, + maxArgs=2, + ), "arrayPushBack": STLFunction(fn=arrayPushBack, minArgs=2, maxArgs=2), "arrayPushFront": STLFunction(fn=arrayPushFront, minArgs=2, maxArgs=2), "arrayPopBack": STLFunction(fn=arrayPopBack, minArgs=1, maxArgs=1), diff --git a/hogvm/python/stl/bytecode.py b/hogvm/python/stl/bytecode.py index c84573e89e300..748128e075dce 100644 --- a/hogvm/python/stl/bytecode.py +++ b/hogvm/python/stl/bytecode.py @@ -1,6 +1,7 @@ # This file is generated by hogvm/stl/compile.py # fmt: off BYTECODE_STL: dict[str, tuple[list[str], list]] = { + "arrayCount": (["func", "arr"], [33, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 31, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 7, 33, 1, 36, 2, 6, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -38, 35, 35, 35, 35, 35, 36, 2, 38, 35]), "arrayExists": (["func", "arr"], [36, 1, 36, 2, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 36, 5, 36, 4, 16, 40, 26, 36, 3, 36, 4, 45, 37, 6, 36, 6, 36, 0, 54, 1, 40, 2, 29, 38, 36, 4, 33, 1, 6, 37, 4, 39, -33, 35, 35, 35, 35, 35, 30, 38]), "arrayFilter": (["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 33, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 9, 36, 2, 36, 7, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -40, 35, 35, 35, 35, 35, 36, 2, 38, 35]), "arrayMap": (["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 29, 36, 4, 36, 5, 45, 37, 7, 36, 2, 36, 7, 36, 0, 54, 1, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -36, 35, 35, 35, 35, 35, 36, 2, 38, 35]), diff --git a/hogvm/stl/src/arrayCount.hog b/hogvm/stl/src/arrayCount.hog new file mode 100644 index 0000000000000..da2cac89c27fa --- /dev/null +++ b/hogvm/stl/src/arrayCount.hog @@ -0,0 +1,9 @@ +fn arrayCount(func, arr) { + let count := 0 + for (let i in arr) { + if (func(i)) { + count := count + 1 + } + } + return count +} diff --git a/hogvm/typescript/package.json b/hogvm/typescript/package.json index 8a52d73906343..b046816d2aa9e 100644 --- a/hogvm/typescript/package.json +++ b/hogvm/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@posthog/hogvm", - "version": "1.0.44", + "version": "1.0.47", "description": "PostHog Hog Virtual Machine", "types": "dist/index.d.ts", "source": "src/index.ts", diff --git a/hogvm/typescript/src/stl/bytecode.ts b/hogvm/typescript/src/stl/bytecode.ts index d966188adaeaa..50e417c8ccb90 100644 --- a/hogvm/typescript/src/stl/bytecode.ts +++ b/hogvm/typescript/src/stl/bytecode.ts @@ -1,5 +1,6 @@ // This file is generated by hogvm/stl/compile.py export const BYTECODE_STL: Record = { + "arrayCount": [["func", "arr"], [33, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 
31, 36, 6, 36, 5, 16, 40, 31, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 7, 33, 1, 36, 2, 6, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -38, 35, 35, 35, 35, 35, 36, 2, 38, 35]], "arrayExists": [["func", "arr"], [36, 1, 36, 2, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 36, 5, 36, 4, 16, 40, 26, 36, 3, 36, 4, 45, 37, 6, 36, 6, 36, 0, 54, 1, 40, 2, 29, 38, 36, 4, 33, 1, 6, 37, 4, 39, -33, 35, 35, 35, 35, 35, 30, 38]], "arrayFilter": [["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 33, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 9, 36, 2, 36, 7, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -40, 35, 35, 35, 35, 35, 36, 2, 38, 35]], "arrayMap": [["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 29, 36, 4, 36, 5, 45, 37, 7, 36, 2, 36, 7, 36, 0, 54, 1, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -36, 35, 35, 35, 35, 35, 36, 2, 38, 35]], diff --git a/hogvm/typescript/src/stl/stl.ts b/hogvm/typescript/src/stl/stl.ts index 1e8282730900b..328621560c702 100644 --- a/hogvm/typescript/src/stl/stl.ts +++ b/hogvm/typescript/src/stl/stl.ts @@ -115,6 +115,8 @@ export const STL: Record = { return args[0].size === 0 } return Object.keys(args[0]).length === 0 + } else if (typeof args[0] === 'number' || typeof args[0] === 'boolean') { + return false } return !args[0] }, @@ -294,6 +296,28 @@ export const STL: Record = { minArgs: 3, maxArgs: 3, }, + position: { + fn: ([str, elem]) => { + if (typeof str === 'string') { + return str.indexOf(String(elem)) + 1 + } else { + return 0 + } + }, + minArgs: 2, + maxArgs: 2, + }, + positionCaseInsensitive: { + fn: ([str, elem]) => { + if (typeof str === 'string') { + return str.toLowerCase().indexOf(String(elem).toLowerCase()) + 1 + } else { + return 0 + } + }, + minArgs: 2, + maxArgs: 2, + }, trim: { fn: ([str, char]) => { if (char === null || char === undefined) { @@ -418,6 +442,17 @@ export const STL: Record = { minArgs: 1, maxArgs: 1, }, + indexOf: { + fn: ([arrOrString, elem]) => { + if (Array.isArray(arrOrString)) { + return arrOrString.indexOf(elem) + 1 + } else { + return 0 + } + }, + minArgs: 2, + maxArgs: 2, + }, arrayPushBack: { fn: ([arr, item]) => { if (!Array.isArray(arr)) { diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 85b48ed0ed16f..1b9ca11ec4e13 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0465_datawarehouse_stripe_account +posthog: 0467_add_web_vitals_allowed_metrics sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 9e25498b357dd..149978d16ec83 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -42,6 +42,15 @@ posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argume posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has incompatible type Module | None; 
expected Module [arg-type] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload] posthog/utils.py:0: note: Possible overload variants: posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any] @@ -103,8 +112,6 @@ posthog/models/filters/base_filter.py:0: error: "HogQLContext" has no attribute posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] -posthog/models/hog_functions/hog_function.py:0: error: Argument 2 to "get_hog_function_status" has incompatible type "UUID"; expected "str" [arg-type] -posthog/models/hog_functions/hog_function.py:0: error: Argument 2 to "patch_hog_function_status" has incompatible type "UUID"; expected "str" [arg-type] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "type[User]", base class "BaseManager" defined the type as "type[_T]") [assignment] posthog/models/user.py:0: error: Cannot override class variable (previously declared on base class "AbstractBaseUser") with instance variable [misc] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "None", base class "AbstractUser" defined the type as "CharField[str | int | Combinable, str]") [assignment] @@ -250,7 +257,6 @@ posthog/hogql/printer.py:0: error: "FieldOrTable" has no attribute "name" [attr posthog/hogql/printer.py:0: error: "FieldOrTable" has no attribute "name" [attr-defined] posthog/hogql/printer.py:0: error: Argument 2 to "_get_materialized_column" of "_Printer" has incompatible type "str | int"; expected "str" [arg-type] posthog/hogql/printer.py:0: error: Argument 1 to "_print_identifier" of "_Printer" has incompatible type "str | None"; expected "str" [arg-type] -posthog/user_permissions.py:0: error: Key expression in dictionary comprehension has incompatible type "UUID"; expected type "int" [misc] posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "Level | None") [return-value] posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "Level | None") [return-value] posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "RestrictionLevel") [return-value] @@ 
-258,7 +264,6 @@ posthog/tasks/update_survey_iteration.py:0: error: Incompatible types in assignm posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "save" [union-attr] -posthog/tasks/update_survey_iteration.py:0: error: Incompatible type for "key" of "FeatureFlag" (got "UUID", expected "str | int | Combinable") [misc] posthog/permissions.py:0: error: Argument 2 to "feature_enabled" has incompatible type "str | None"; expected "str" [arg-type] posthog/models/event/util.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "datetime") [assignment] posthog/models/event/util.py:0: error: Module has no attribute "utc" [attr-defined] @@ -279,7 +284,6 @@ posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/api/shared.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] -ee/billing/quota_limiting.py:0: error: Argument 2 to "feature_enabled" has incompatible type "UUID"; expected "str" [arg-type] ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined] ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] @@ -325,7 +329,6 @@ posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has i posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type] posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment] posthog/api/utils.py:0: error: Argument 1 to "UUID" has incompatible type "int | str"; expected "str | None" [arg-type] -posthog/api/email_verification.py:0: error: Argument 2 to "feature_enabled" has incompatible type "UUID"; expected "str" [arg-type] posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | None"; expected "str" [arg-type] posthog/hogql/property.py:0: error: Incompatible type for lookup 'id': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc] @@ -369,18 +372,12 @@ ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unrea posthog/api/insight.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/api/dashboards/dashboard.py:0: error: Argument 1 to "dashboard_queryset" of "DashboardTile" has incompatible type "DashboardTile_RelatedManager"; expected "QuerySet[Any, Any]" [arg-type] posthog/api/person.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" 
[arg-type] posthog/api/person.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Cannot determine type of "group_properties_filter_group" [has-type] posthog/caching/insight_caching_state.py:0: error: Argument "params" to "execute" of "CursorWrapper" has incompatible type "list[object]"; expected "Sequence[bool | int | float | Decimal | str | <6 more items> | None] | Mapping[str, bool | int | float | Decimal | str | <6 more items> | None] | None" [arg-type] posthog/api/cohort.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] -posthog/api/cohort.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/api/cohort.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/cohort.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/caching/insights_api.py:0: error: Unsupported operand types for >= ("datetime" and "None") [operator] posthog/caching/insights_api.py:0: note: Right operand is of type "datetime | None" @@ -388,8 +385,6 @@ posthog/api/feature_flag.py:0: error: Item "Sequence[Any]" of "Any | Sequence[An posthog/api/feature_flag.py:0: error: Item "None" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr] posthog/api/feature_flag.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/api/feature_flag.py:0: error: Argument 2 to "get_all_feature_flags" has incompatible type "str | None"; expected "str" [arg-type] -posthog/api/feature_flag.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/api/feature_flag.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Argument 1 to "append" of "list" has incompatible type "EventPropertyFilter"; expected "Expr" [arg-type] posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] @@ -413,8 +408,6 @@ posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "SelectU posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "sample" 
[union-attr] posthog/hogql_queries/insights/funnels/funnels_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/survey.py:0: error: Incompatible types in assignment (expression has type "Any | Sequence[Any] | None", variable has type "Survey | None") [assignment] -posthog/api/survey.py:0: error: Argument "item_id" to "log_activity" has incompatible type "UUID"; expected "int | str | UUIDT | None" [arg-type] -posthog/api/survey.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/survey.py:0: error: Item "list[_ErrorFullDetails]" of "_FullDetailDict | list[_ErrorFullDetails] | dict[str, _ErrorFullDetails]" has no attribute "get" [union-attr] posthog/api/survey.py:0: error: Item "object" of "object | Any" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql_queries/web_analytics/web_overview.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] @@ -445,9 +438,6 @@ posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr] -posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] @@ -607,7 +597,6 @@ posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | Non posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/notebook.py:0: error: Incompatible types in assignment (expression has type "int", variable has type "str | None") [assignment] -posthog/api/exports.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in 
assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] @@ -778,9 +767,6 @@ posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required] -posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] -posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] -posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] posthog/session_recordings/session_recording_api.py:0: error: Argument "team_id" to "get_realtime_snapshots" has incompatible type "int"; expected "str" [arg-type] posthog/session_recordings/session_recording_api.py:0: error: Value of type variable "SupportsRichComparisonT" of "sorted" cannot be "str | None" [type-var] posthog/session_recordings/session_recording_api.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] @@ -822,10 +808,8 @@ posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_creat posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment] posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | int | Combinable") [assignment] posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | int | Combinable") [assignment] @@ -835,10 +819,7 @@ posthog/api/plugin.py:0: error: Incompatible type for "file_name" of "PluginAtta posthog/api/plugin.py:0: error: Incompatible type for "file_size" of "PluginAttachment" (got "int | None", expected "float | int | str | Combinable") [misc] posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] -posthog/api/plugin.py:0: error: Argument 
"organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] @@ -907,7 +888,6 @@ posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "s posthog/api/test/batch_exports/test_update.py:0: error: Unsupported target for indexed assignment ("Collection[str]") [index] posthog/api/test/batch_exports/test_update.py:0: error: Unsupported target for indexed assignment ("Collection[str]") [index] posthog/api/test/batch_exports/test_update.py:0: error: Dict entry 1 has incompatible type "str": "dict[str, Collection[str]]"; expected "str": "str" [dict-item] -posthog/api/test/batch_exports/test_update.py:0: error: Argument 3 to "get_batch_export_ok" has incompatible type "UUID"; expected "int" [arg-type] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] diff --git a/package.json b/package.json index 44309818ccdb6..ca21e923e23ad 100644 --- a/package.json +++ b/package.json @@ -66,7 +66,6 @@ "mypy-baseline-sync": "mypy -p posthog | mypy-baseline sync" }, "dependencies": { - "@ant-design/icons": "^4.7.0", "@babel/runtime": "^7.24.0", "@dnd-kit/core": "^6.0.8", "@dnd-kit/modifiers": "^6.0.1", @@ -77,7 +76,7 @@ "@medv/finder": "^3.1.0", "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.6.0", - "@posthog/hogvm": "^1.0.44", + "@posthog/hogvm": "^1.0.47", "@posthog/icons": "0.8.1", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", @@ -150,7 +149,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.160.3", + "posthog-js": "1.161.1", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/functional_tests/definitions.test.ts b/plugin-server/functional_tests/definitions.test.ts deleted file mode 100644 index 574fdf5d091ba..0000000000000 --- a/plugin-server/functional_tests/definitions.test.ts +++ /dev/null @@ -1,160 +0,0 @@ -import { UUIDT } from '../src/utils/utils' -import { capture, createOrganization, createTeam, getPropertyDefinitions } from './api' -import { waitForExpect } from './expectations' - -let organizationId: string - -beforeAll(async () => { - organizationId = await createOrganization() -}) - -test.concurrent(`event ingestion: definition for string property %p`, async () => { - const teamId = await createTeam(organizationId) - const distinctId = 'distinctId' - const uuid = new 
UUIDT().toString() - - await capture({ - teamId, - distinctId, - uuid, - event: 'custom event', - properties: { - property: 'hehe', - }, - }) - - await waitForExpect(async () => { - const propertyDefinitions = await getPropertyDefinitions(teamId) - expect(propertyDefinitions).toContainEqual( - expect.objectContaining({ - name: 'property', - is_numerical: false, - property_type: 'String', - }) - ) - }) -}) - -test.concurrent.each([[2], [2.1234], ['2'], ['2.1234']])( - `event ingestion: definition for number property as number %p`, - async (numberValue: any) => { - const teamId = await createTeam(organizationId) - const distinctId = 'distinctId' - const uuid = new UUIDT().toString() - - await capture({ - teamId, - distinctId, - uuid, - event: 'custom event', - properties: { - property: numberValue, - }, - }) - - await waitForExpect(async () => { - const propertyDefinitions = await getPropertyDefinitions(teamId) - expect(propertyDefinitions).toContainEqual( - expect.objectContaining({ - name: 'property', - is_numerical: typeof numberValue === 'number', - property_type: typeof numberValue === 'number' ? 'Numeric' : 'String', - }) - ) - }) - } -) - -test.concurrent.each([ - ['01/01/2020 00:00:00'], - ['01-01-2020 00:00:00'], - ['2020/01/01 00:00:00'], - ['2020-01-01T00:00:00Z'], - ['2020-01-01 00:00:00'], - ['2020-01-01'], -])(`event ingestion: definition for date/datetime property should be datetime %p`, async (dateString: string) => { - const teamId = await createTeam(organizationId) - const distinctId = 'distinctId' - const uuid = new UUIDT().toString() - - await capture({ - teamId, - distinctId, - uuid, - event: 'custom event', - properties: { - property: dateString, - }, - }) - - await waitForExpect(async () => { - const propertyDefinitions = await getPropertyDefinitions(teamId) - expect(propertyDefinitions).toContainEqual( - expect.objectContaining({ - name: 'property', - is_numerical: false, - property_type: 'DateTime', - }) - ) - }) -}) - -test.concurrent.each([[true], ['true']])( - `event ingestion: definition for boolean property %p`, - async (booleanValue: any) => { - const teamId = await createTeam(organizationId) - const distinctId = 'distinctId' - const uuid = new UUIDT().toString() - - await capture({ - teamId, - distinctId, - uuid, - event: 'custom event', - properties: { - property: booleanValue, - }, - }) - - await waitForExpect(async () => { - const propertyDefinitions = await getPropertyDefinitions(teamId) - expect(propertyDefinitions).toContainEqual( - expect.objectContaining({ - name: 'property', - is_numerical: false, - property_type: 'Boolean', - }) - ) - }) - } -) - -test.concurrent.each([['utm_abc'], ['utm_123']])( - `event ingestion: utm properties should always be strings`, - async (propertyName: string) => { - const teamId = await createTeam(organizationId) - const distinctId = 'distinctId' - const uuid = new UUIDT().toString() - - await capture({ - teamId, - distinctId, - uuid, - event: 'custom event', - properties: { - [propertyName]: 1234, - }, - }) - - await waitForExpect(async () => { - const propertyDefinitions = await getPropertyDefinitions(teamId) - expect(propertyDefinitions).toContainEqual( - expect.objectContaining({ - name: propertyName, - is_numerical: false, - property_type: 'String', - }) - ) - }) - } -) diff --git a/plugin-server/package.json b/plugin-server/package.json index 75a137b2e1924..452011ada2a02 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -23,7 +23,8 @@ "prettier:check": "prettier --check .", 
"prepublishOnly": "pnpm build", "setup:dev:clickhouse": "cd .. && DEBUG=1 python manage.py migrate_clickhouse", - "setup:test": "cd .. && TEST=1 python manage.py setup_test_environment", + "setup:test": "cd .. && TEST=1 python manage.py setup_test_environment && cd plugin-server && pnpm run setup:test:cyclotron", + "setup:test:cyclotron": "CYCLOTRON_DATABASE_NAME=test_cyclotron ../rust/bin/migrate-cyclotron", "services:start": "cd .. && docker compose -f docker-compose.dev.yml up", "services:stop": "cd .. && docker compose -f docker-compose.dev.yml down", "services:clean": "cd .. && docker compose -f docker-compose.dev.yml rm -v", @@ -53,7 +54,7 @@ "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", "@posthog/cyclotron": "file:../rust/cyclotron-node", - "@posthog/hogvm": "^1.0.44", + "@posthog/hogvm": "^1.0.47", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index 08f08046ad14c..9a36386f4c602 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -47,8 +47,8 @@ dependencies: specifier: file:../rust/cyclotron-node version: file:../rust/cyclotron-node '@posthog/hogvm': - specifier: ^1.0.44 - version: 1.0.44(luxon@3.4.4) + specifier: ^1.0.47 + version: 1.0.47(luxon@3.4.4) '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -3116,8 +3116,8 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/hogvm@1.0.44(luxon@3.4.4): - resolution: {integrity: sha512-Ss7gTPyvPyviNipVQOqnsCa66IMmMf+DEg7iX/vQMcWDuFwvHNbdze1iwFVoXCjLci+h8SW2rOMPB0S5A2jJXg==} + /@posthog/hogvm@1.0.47(luxon@3.4.4): + resolution: {integrity: sha512-6zoMKH6fmO9evuhpd9dtkVi2GFk+dGsxcHe6l6mMo7yXd0Kwssmiwr8aLndYTsjdhRkVMi7iaoi/GLrh8F9ayQ==} peerDependencies: luxon: ^3.4.4 dependencies: @@ -10736,5 +10736,4 @@ packages: file:../rust/cyclotron-node: resolution: {directory: ../rust/cyclotron-node, type: directory} name: '@posthog/cyclotron' - version: 0.1.0 dev: false diff --git a/plugin-server/src/cdp/cdp-api.ts b/plugin-server/src/cdp/cdp-api.ts index 34de05942471e..cfc70e7f1b8fc 100644 --- a/plugin-server/src/cdp/cdp-api.ts +++ b/plugin-server/src/cdp/cdp-api.ts @@ -9,7 +9,7 @@ import { HogExecutor } from './hog-executor' import { HogFunctionManager } from './hog-function-manager' import { HogWatcher, HogWatcherState } from './hog-watcher' import { HogFunctionInvocationResult, HogFunctionType, LogEntry } from './types' -import { createInvocation } from './utils' +import { createInvocation, queueBlobToString } from './utils' export class CdpApi { private hogExecutor: HogExecutor @@ -144,11 +144,19 @@ export class CdpApi { if (invocation.queue === 'fetch') { if (mock_async_functions) { // Add the state, simulating what executeAsyncResponse would do + + // Re-parse the fetch args for the logging + const fetchArgs = { + ...invocation.queueParameters, + body: queueBlobToString(invocation.queueBlob), + } + response = { invocation: { ...invocation, queue: 'hog', - queueParameters: { response: { status: 200, body: {} } }, + queueParameters: { response: { status: 200 } }, + queueBlob: Buffer.from('{}'), }, finished: false, logs: [ @@ -160,7 +168,7 @@ export class CdpApi { { level: 'info', timestamp: DateTime.now(), - message: `fetch(${JSON.stringify(invocation.queueParameters, null, 2)})`, + message: `fetch(${JSON.stringify(fetchArgs, null, 2)})`, }, ], } diff --git a/plugin-server/src/cdp/cdp-consumers.ts b/plugin-server/src/cdp/cdp-consumers.ts index 
499b030e61564..f75b2a23096e5 100644 --- a/plugin-server/src/cdp/cdp-consumers.ts +++ b/plugin-server/src/cdp/cdp-consumers.ts @@ -1,8 +1,9 @@ -import cyclotron from '@posthog/cyclotron' +import { CyclotronJob, CyclotronManager, CyclotronWorker } from '@posthog/cyclotron' import { captureException } from '@sentry/node' import { Message } from 'node-rdkafka' import { Counter, Histogram } from 'prom-client' +import { buildIntegerMatcher } from '../config/config' import { KAFKA_APP_METRICS_2, KAFKA_CDP_FUNCTION_CALLBACKS, @@ -14,7 +15,15 @@ import { BatchConsumer, startBatchConsumer } from '../kafka/batch-consumer' import { createRdConnectionConfigFromEnvVars } from '../kafka/config' import { addSentryBreadcrumbsEventListeners } from '../main/ingestion-queues/kafka-metrics' import { runInstrumentedFunction } from '../main/utils' -import { AppMetric2Type, Hub, PluginServerService, RawClickHouseEvent, TeamId, TimestampFormat } from '../types' +import { + AppMetric2Type, + Hub, + PluginServerService, + RawClickHouseEvent, + TeamId, + TimestampFormat, + ValueMatcher, +} from '../types' import { createKafkaProducerWrapper } from '../utils/db/hub' import { KafkaProducerWrapper } from '../utils/db/kafka-producer-wrapper' import { captureTeamEvent } from '../utils/posthog' @@ -31,6 +40,7 @@ import { CdpRedis, createCdpRedisPool } from './redis' import { HogFunctionInvocation, HogFunctionInvocationGlobals, + HogFunctionInvocationQueueParameters, HogFunctionInvocationResult, HogFunctionInvocationSerialized, HogFunctionInvocationSerializedCompressed, @@ -44,7 +54,7 @@ import { createInvocation, gzipObject, prepareLogEntriesForClickhouse, - serializeInvocation, + serializeHogFunctionInvocation, unGzipObject, } from './utils' @@ -88,8 +98,6 @@ abstract class CdpConsumerBase { protected kafkaProducer?: KafkaProducerWrapper protected abstract name: string - protected abstract topic: string - protected abstract consumerGroupId: string protected heartbeat = () => {} @@ -108,7 +116,7 @@ abstract class CdpConsumerBase { public get service(): PluginServerService { return { - id: this.consumerGroupId, + id: this.name, onShutdown: async () => await this.stop(), healthcheck: () => this.isHealthy() ?? 
false, batchConsumer: this.batchConsumer, @@ -156,8 +164,6 @@ abstract class CdpConsumerBase { return results } - protected abstract _handleKafkaBatch(messages: Message[]): Promise<void> - protected async produceQueuedMessages() { const messages = [...this.messagesToProduce] this.messagesToProduce = [] @@ -205,20 +211,23 @@ abstract class CdpConsumerBase { }) } - protected async queueInvocations(invocation: HogFunctionInvocation[]) { + // NOTE: These will be removed once we are only on Cyclotron + protected async queueInvocationsToKafka(invocation: HogFunctionInvocation[]) { await Promise.all( invocation.map(async (item) => { - await this.queueInvocation(item) + await this.queueInvocationToKafka(item) }) ) } - protected async queueInvocation(invocation: HogFunctionInvocation) { - // TODO: Add cylcotron check here and enqueue that way - // For now we just enqueue to kafka - // For kafka style this is overkill to enqueue this way but it simplifies migrating to the new system + protected async queueInvocationToKafka(invocation: HogFunctionInvocation) { + // NOTE: We keep the queueParams args as kafka land still needs them + const serializedInvocation: HogFunctionInvocationSerialized = { + ...invocation, + hogFunctionId: invocation.hogFunction.id, + } - const serializedInvocation = serializeInvocation(invocation) + delete (serializedInvocation as any).hogFunction const request: HogFunctionInvocationSerializedCompressed = { state: await gzipObject(serializedInvocation), @@ -234,14 +243,22 @@ abstract class CdpConsumerBase { } protected async processInvocationResults(results: HogFunctionInvocationResult[]): Promise<void> { - await runInstrumentedFunction({ + return await runInstrumentedFunction({ statsKey: `cdpConsumer.handleEachBatch.produceResults`, func: async () => { - console.log('Processing invocations results', results.length) + await this.hogWatcher.observeResults(results) await Promise.all( results.map(async (result) => { - // Tricky: We want to pull all the logs out as we don't want them to be passed around to any subsequent functions + if (result.finished || result.error) { + this.produceAppMetric({ + team_id: result.invocation.teamId, + app_source_id: result.invocation.hogFunction.id, + metric_kind: result.error ? 'failure' : 'success', + metric_name: result.error ?
'failed' : 'succeeded', - count: 1, - }) - } else { - // Means there is follow up so we enqueue it - await this.queueInvocation(result.invocation) - } }) ) }, }) } - protected async startKafkaConsumer() { + protected async startKafkaConsumer(options: { + topic: string + groupId: string + handleBatch: (messages: Message[]) => Promise + }): Promise { this.batchConsumer = await startBatchConsumer({ + ...options, connectionConfig: createRdConnectionConfigFromEnvVars(this.hub), - groupId: this.consumerGroupId, - topic: this.topic, autoCommit: true, sessionTimeout: this.hub.KAFKA_CONSUMPTION_SESSION_TIMEOUT_MS, maxPollIntervalMs: this.hub.KAFKA_CONSUMPTION_MAX_POLL_INTERVAL_MS, @@ -314,7 +321,7 @@ abstract class CdpConsumerBase { statsKey: `cdpConsumer.handleEachBatch`, sendTimeoutGuardToSentry: false, func: async () => { - await this._handleKafkaBatch(messages) + await options.handleBatch(messages) }, }) }, @@ -324,6 +331,9 @@ abstract class CdpConsumerBase { addSentryBreadcrumbsEventListeners(this.batchConsumer.consumer) this.batchConsumer.consumer.on('disconnected', async (err) => { + if (!this.isStopping) { + return + } // since we can't be guaranteed that the consumer will be stopped before some other code calls disconnect // we need to listen to disconnect and make sure we're stopped status.info('🔁', `${this.name} batch consumer disconnected, cleaning up`, { err }) @@ -335,15 +345,11 @@ abstract class CdpConsumerBase { // NOTE: This is only for starting shared services await Promise.all([ this.hogFunctionManager.start(), - this.hub.CYCLOTRON_DATABASE_URL - ? cyclotron.initManager({ shards: [{ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }] }) - : Promise.resolve(), + createKafkaProducerWrapper(this.hub).then((producer) => { + this.kafkaProducer = producer + this.kafkaProducer.producer.connect() + }), ]) - - this.kafkaProducer = await createKafkaProducerWrapper(this.hub) - this.kafkaProducer.producer.connect() - - await this.startKafkaConsumer() } public async stop(): Promise { @@ -362,20 +368,27 @@ abstract class CdpConsumerBase { } public isHealthy() { - // TODO: Check either kafka consumer or cyclotron worker exists - // and that whatever exists is healthy return this.batchConsumer?.isHealthy() } } /** * This consumer handles incoming events from the main clickhouse topic + * Currently it produces to both kafka and Cyclotron based on the team */ - export class CdpProcessedEventsConsumer extends CdpConsumerBase { protected name = 'CdpProcessedEventsConsumer' - protected topic = KAFKA_EVENTS_JSON - protected consumerGroupId = 'cdp-processed-events-consumer' + private cyclotronMatcher: ValueMatcher + private cyclotronManager?: CyclotronManager + + constructor(hub: Hub) { + super(hub) + this.cyclotronMatcher = buildIntegerMatcher(hub.CDP_CYCLOTRON_ENABLED_TEAMS, true) + } + + private cyclotronEnabled(invocation: HogFunctionInvocation): boolean { + return !!(this.cyclotronManager && this.cyclotronMatcher(invocation.globals.project.id)) + } public async processBatch(invocationGlobals: HogFunctionInvocationGlobals[]): Promise { if (!invocationGlobals.length) { @@ -386,23 +399,48 @@ export class CdpProcessedEventsConsumer extends CdpConsumerBase { this.createHogFunctionInvocations(invocationGlobals) ) - if (this.hub.CDP_EVENT_PROCESSOR_EXECUTE_FIRST_STEP) { - // NOTE: This is for testing the two ways of enqueueing processing. 
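The queueInvocationToKafka path above ships the whole serialized invocation as a single compressed string in the message value. gzipObject and its inverse live in cdp/utils.ts and are not shown in this hunk; a plausible sketch of the pair, assuming JSON plus a base64 transport encoding (the encoding is an assumption, not confirmed by this diff):

import { promisify } from 'util'
import { gunzip, gzip } from 'zlib'

const gzipAsync = promisify(gzip)
const gunzipAsync = promisify(gunzip)

// Compress an arbitrary object into a string suitable for a Kafka message value.
async function gzipObject<T>(object: T): Promise<string> {
    const zipped = await gzipAsync(Buffer.from(JSON.stringify(object), 'utf-8'))
    return zipped.toString('base64')
}

// Inverse: used by the callback consumer to rehydrate the invocation state.
async function unGzipObject<T>(data: string): Promise<T> {
    const unzipped = await gunzipAsync(Buffer.from(data, 'base64'))
    return JSON.parse(unzipped.toString('utf-8'))
}
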
It will be swapped out for a cyclotron env check - // Kafka based workflow + // Split out the cyclotron invocations + const [cyclotronInvocations, kafkaInvocations] = invocationsToBeQueued.reduce( + (acc, item) => { + if (this.cyclotronEnabled(item)) { + acc[0].push(item) + } else { + acc[1].push(item) + } + + return acc + }, + [[], []] as [HogFunctionInvocation[], HogFunctionInvocation[]] + ) + + // For the cyclotron ones we simply create the jobs + await Promise.all( + cyclotronInvocations.map((item) => + this.cyclotronManager?.createJob({ + teamId: item.globals.project.id, + functionId: item.hogFunction.id, + queueName: 'hog', + priority: item.priority, + vmState: serializeHogFunctionInvocation(item), + }) + ) + ) + + if (kafkaInvocations.length) { + // As we don't want to over-produce to kafka we invoke the hog functions and then queue the results const invocationResults = await runInstrumentedFunction({ statsKey: `cdpConsumer.handleEachBatch.executeInvocations`, func: async () => { - const hogResults = await this.runManyWithHeartbeat(invocationsToBeQueued, (item) => + const hogResults = await this.runManyWithHeartbeat(kafkaInvocations, (item) => this.hogExecutor.execute(item) ) return [...hogResults] }, }) - await this.hogWatcher.observeResults(invocationResults) await this.processInvocationResults(invocationResults) - } else { - await this.queueInvocations(invocationsToBeQueued) + const newInvocations = invocationResults.filter((r) => !r.finished).map((r) => r.invocation) + await this.queueInvocationsToKafka(newInvocations) } await this.produceQueuedMessages() @@ -413,7 +451,6 @@ export class CdpProcessedEventsConsumer extends CdpConsumerBase { /** * Finds all matching hog functions for the given globals. * Filters them for their disabled state as well as masking configs - * */ protected async createHogFunctionInvocations( invocationGlobals: HogFunctionInvocationGlobals[] @@ -446,8 +483,10 @@ export class CdpProcessedEventsConsumer extends CdpConsumerBase { }) const states = await this.hogWatcher.getStates(possibleInvocations.map((x) => x.hogFunction.id)) + const validInvocations: HogFunctionInvocation[] = [] - const notDisabledInvocations = possibleInvocations.filter((item) => { + // Iterate over adding them to the list and updating their priority + possibleInvocations.forEach((item) => { const state = states[item.hogFunction.id].state if (state >= HogWatcherState.disabledForPeriod) { this.produceAppMetric({ @@ -460,15 +499,19 @@ export class CdpProcessedEventsConsumer extends CdpConsumerBase { : 'disabled_permanently', count: 1, }) - return false + return + } + + if (state === HogWatcherState.degraded) { + item.priority = 2 } - return true + validInvocations.push(item) }) // Now we can filter by masking configs const { masked, notMasked: notMaskedInvocations } = await this.hogMasker.filterByMasking( - notDisabledInvocations + validInvocations ) masked.forEach((item) => { @@ -527,15 +570,28 @@ export class CdpProcessedEventsConsumer extends CdpConsumerBase { await this.processBatch(invocationGlobals) } + + public async start(): Promise { + await super.start() + await this.startKafkaConsumer({ + topic: KAFKA_EVENTS_JSON, + groupId: 'cdp-processed-events-consumer', + handleBatch: (messages) => this._handleKafkaBatch(messages), + }) + + this.cyclotronManager = this.hub.CYCLOTRON_DATABASE_URL + ? 
new CyclotronManager({ shards: [{ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }] }) + : undefined + + await this.cyclotronManager?.connect() + } } /** - * This consumer handles actually invoking hog in a loop + * This consumer only deals with kafka messages and will eventually be replaced by the Cyclotron worker */ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { protected name = 'CdpFunctionCallbackConsumer' - protected topic = KAFKA_CDP_FUNCTION_CALLBACKS - protected consumerGroupId = 'cdp-function-callback-consumer' public async processBatch(invocations: HogFunctionInvocation[]): Promise { if (!invocations.length) { @@ -565,8 +621,9 @@ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { }, }) - await this.hogWatcher.observeResults(invocationResults) await this.processInvocationResults(invocationResults) + const newInvocations = invocationResults.filter((r) => !r.finished).map((r) => r.invocation) + await this.queueInvocationsToKafka(newInvocations) await this.produceQueuedMessages() } @@ -635,12 +692,6 @@ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { }) ) - invocations.forEach((item) => { - if (!item.hogFunction?.id) { - console.error('No hog function id', item) - } - }) - return invocations }, }) @@ -648,52 +699,143 @@ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { await this.processBatch(events) } + + public async start(): Promise { + await super.start() + await this.startKafkaConsumer({ + topic: KAFKA_CDP_FUNCTION_CALLBACKS, + groupId: 'cdp-function-callback-consumer', + handleBatch: (messages) => this._handleKafkaBatch(messages), + }) + } } -// // TODO: Split out non-Kafka specific parts of CdpConsumerBase so that it can be used by the -// // Cyclotron worker below. Or maybe we can just wait, and rip the Kafka bits out once Cyclotron is -// // shipped (and rename it something other than consumer, probably). For now, this is an easy way to -// // use existing code and get an end-to-end demo shipped. -// export class CdpCyclotronWorker extends CdpFunctionCallbackConsumer { -// protected name = 'CdpCyclotronWorker' -// protected topic = 'UNUSED-CdpCyclotronWorker' -// protected consumerGroupId = 'UNUSED-CdpCyclotronWorker' -// private runningWorker: Promise | undefined -// private isUnhealthy = false - -// private async innerStart() { -// try { -// const limit = 100 // TODO: Make configurable. -// while (!this.isStopping) { -// const jobs = await cyclotron.dequeueJobsWithVmState('hog', limit) -// // TODO: Decode jobs into the right types - -// await this.processBatch(jobs) -// } -// } catch (err) { -// this.isUnhealthy = true -// console.error('Error in Cyclotron worker', err) -// throw err -// } -// } - -// public async start() { -// await cyclotron.initManager({ shards: [{ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }] }) -// await cyclotron.initWorker({ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }) - -// // Consumer `start` expects an async task is started, and not that `start` itself blocks -// // indefinitely. -// this.runningWorker = this.innerStart() - -// return Promise.resolve() -// } - -// public async stop() { -// await super.stop() -// await this.runningWorker -// } - -// public isHealthy() { -// return this.isUnhealthy -// } -// } +/** + * The future of the CDP consumer. 
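Which of the two paths an invocation takes is decided purely by team id: buildIntegerMatcher turns the comma-separated CDP_CYCLOTRON_ENABLED_TEAMS setting into a ValueMatcher<number>. Roughly the following sketch, assuming the second argument enables a '*' wildcard (suggested by the `true` passed in the constructor above, but not confirmed by this diff):

type ValueMatcher<T> = (value: T) => boolean

function buildIntegerMatcherSketch(config: string, allowStar: boolean): ValueMatcher<number> {
    if (!config) {
        return () => false // empty setting: nothing is routed to Cyclotron
    }
    if (allowStar && config === '*') {
        return () => true // wildcard: every team is routed to Cyclotron
    }
    const teamIds = new Set(config.split(',').map((part) => parseInt(part.trim(), 10)))
    return (value) => teamIds.has(value)
}

const matcher = buildIntegerMatcherSketch('2,7', true)
matcher(2) // true  -> createJob on the CyclotronManager
matcher(3) // false -> invocation stays on the Kafka path
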
This will be the main consumer that will handle all hog jobs from Cyclotron + */ +export class CdpCyclotronWorker extends CdpConsumerBase { + protected name = 'CdpCyclotronWorker' + private cyclotronWorker?: CyclotronWorker + private runningWorker: Promise | undefined + protected queue: 'hog' | 'fetch' = 'hog' + + public async processBatch(invocations: HogFunctionInvocation[]): Promise { + if (!invocations.length) { + return + } + + const invocationResults = await runInstrumentedFunction({ + statsKey: `cdpConsumer.handleEachBatch.executeInvocations`, + func: async () => { + // NOTE: In the future this service will never do fetching (unless we decide we want to do it in node at some point) + // This is just "for now" to support the transition to cyclotron + const fetchQueue = invocations.filter((item) => item.queue === 'fetch') + const fetchResults = await this.runManyWithHeartbeat(fetchQueue, (item) => + this.fetchExecutor.execute(item) + ) + + const hogQueue = invocations.filter((item) => item.queue === 'hog') + const hogResults = await this.runManyWithHeartbeat(hogQueue, (item) => this.hogExecutor.execute(item)) + return [...hogResults, ...(fetchResults.filter(Boolean) as HogFunctionInvocationResult[])] + }, + }) + + await this.processInvocationResults(invocationResults) + await this.updateJobs(invocationResults) + await this.produceQueuedMessages() + } + + private async updateJobs(invocations: HogFunctionInvocationResult[]) { + await Promise.all( + invocations.map(async (item) => { + const id = item.invocation.id + if (item.error) { + status.debug('⚡️', 'Updating job to failed', id) + this.cyclotronWorker?.updateJob(id, 'failed') + } else if (item.finished) { + status.debug('⚡️', 'Updating job to completed', id) + this.cyclotronWorker?.updateJob(id, 'completed') + } else { + status.debug('⚡️', 'Updating job to available', id) + this.cyclotronWorker?.updateJob(id, 'available', { + priority: item.invocation.priority, + vmState: serializeHogFunctionInvocation(item.invocation), + queueName: item.invocation.queue, + parameters: item.invocation.queueParameters ?? null, + blob: item.invocation.queueBlob ?? null, + }) + } + await this.cyclotronWorker?.flushJob(id) + }) + ) + } + + private async handleJobBatch(jobs: CyclotronJob[]) { + const invocations: HogFunctionInvocation[] = [] + + for (const job of jobs) { + // NOTE: This is all a bit messy and might be better to refactor into a helper + if (!job.functionId) { + throw new Error('Bad job: ' + JSON.stringify(job)) + } + const hogFunction = this.hogFunctionManager.getHogFunction(job.functionId) + + if (!hogFunction) { + // Here we need to mark the job as failed + + status.error('Error finding hog function', { + id: job.functionId, + }) + this.cyclotronWorker?.updateJob(job.id, 'failed') + await this.cyclotronWorker?.flushJob(job.id) + continue + } + + const parsedState = job.vmState as HogFunctionInvocationSerialized + + invocations.push({ + id: job.id, + globals: parsedState.globals, + teamId: hogFunction.team_id, + hogFunction, + priority: job.priority, + queue: (job.queueName as any) ?? 'hog', + queueParameters: job.parameters as HogFunctionInvocationQueueParameters | undefined, + queueBlob: job.blob ?? 
undefined, + vmState: parsedState.vmState, + timings: parsedState.timings, + }) + } + + await this.processBatch(invocations) + } + + public async start() { + await super.start() + + this.cyclotronWorker = new CyclotronWorker({ + pool: { dbUrl: this.hub.CYCLOTRON_DATABASE_URL }, + queueName: this.queue, + includeVmState: true, + batchMaxSize: this.hub.CDP_CYCLOTRON_BATCH_SIZE, + pollDelayMs: this.hub.CDP_CYCLOTRON_BATCH_DELAY_MS, + }) + await this.cyclotronWorker.connect((jobs) => this.handleJobBatch(jobs)) + } + + public async stop() { + await super.stop() + await this.cyclotronWorker?.disconnect() + await this.runningWorker + } + + public isHealthy() { + return this.cyclotronWorker?.isHealthy() ?? false + } +} + +// Mostly used for testing +export class CdpCyclotronWorkerFetch extends CdpCyclotronWorker { + protected name = 'CdpCyclotronWorkerFetch' + protected queue = 'fetch' as const +} diff --git a/plugin-server/src/cdp/fetch-executor.ts b/plugin-server/src/cdp/fetch-executor.ts index 89900215ec1fd..8907fafc35239 100644 --- a/plugin-server/src/cdp/fetch-executor.ts +++ b/plugin-server/src/cdp/fetch-executor.ts @@ -12,7 +12,7 @@ import { HogFunctionQueueParametersFetchRequest, HogFunctionQueueParametersFetchResponse, } from './types' -import { gzipObject, serializeInvocation } from './utils' +import { gzipObject, queueBlobToString, serializeHogFunctionInvocation } from './utils' export const BUCKETS_KB_WRITTEN = [0, 128, 512, 1024, 2024, 4096, 10240, Infinity] @@ -40,19 +40,22 @@ export class FetchExecutor { async execute(invocation: HogFunctionInvocation): Promise { if (invocation.queue !== 'fetch' || !invocation.queueParameters) { - throw new Error('Bad invocation') + status.error('🦔', `[HogExecutor] Bad invocation`, { invocation }) + return } const params = invocation.queueParameters as HogFunctionQueueParametersFetchRequest - if (params.body) { - histogramFetchPayloadSize.observe(params.body.length / 1024) + + const body = queueBlobToString(invocation.queueBlob) + if (body) { + histogramFetchPayloadSize.observe(body.length / 1024) } try { if (this.hogHookEnabledForTeams(invocation.teamId)) { // This is very temporary until we are commited to Cyclotron const payload: HogFunctionInvocationAsyncRequest = { - state: await gzipObject(serializeInvocation(invocation)), + state: await gzipObject(serializeHogFunctionInvocation(invocation)), teamId: invocation.teamId, hogFunctionId: invocation.hogFunction.id, asyncFunctionRequest: { @@ -61,6 +64,7 @@ export class FetchExecutor { params.url, { ...params, + body, }, ], }, @@ -88,11 +92,12 @@ export class FetchExecutor { } const params = invocation.queueParameters as HogFunctionQueueParametersFetchRequest + const body = queueBlobToString(invocation.queueBlob) || '' + let responseBody = '' const resParams: HogFunctionQueueParametersFetchResponse = { response: { status: 0, - body: {}, }, error: null, timings: [], @@ -102,17 +107,12 @@ export class FetchExecutor { const start = performance.now() const fetchResponse = await trackedFetch(params.url, { method: params.method, - body: params.body, + body, headers: params.headers, timeout: this.serverConfig.EXTERNAL_REQUEST_TIMEOUT_MS, }) - let responseBody = await fetchResponse.text() - try { - responseBody = JSON.parse(responseBody) - } catch (err) { - // Ignore - } + responseBody = await fetchResponse.text() const duration = performance.now() - start @@ -123,7 +123,6 @@ export class FetchExecutor { resParams.response = { status: fetchResponse.status, - body: responseBody, } } catch (err) { 
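Condensed, the job lifecycle that CdpCyclotronWorker implements above is: dequeue a batch, execute, write back one of three states, flush. A stripped-down sketch using only the @posthog/cyclotron calls that appear in this diff (the execute step is a placeholder standing in for the hog/fetch executors):

import { CyclotronJob, CyclotronWorker } from '@posthog/cyclotron'

// Placeholder for hogExecutor.execute / fetchExecutor.execute.
const execute = async (job: CyclotronJob) => ({ finished: true, failed: false })

async function runWorker(dbUrl: string): Promise<void> {
    const worker = new CyclotronWorker({ pool: { dbUrl }, queueName: 'hog', includeVmState: true })

    await worker.connect(async (jobs: CyclotronJob[]) => {
        for (const job of jobs) {
            const result = await execute(job)
            if (result.failed) {
                worker.updateJob(job.id, 'failed')
            } else if (result.finished) {
                worker.updateJob(job.id, 'completed')
            } else {
                // Unfinished work goes back on the queue; the real code also
                // passes vmState, queueName, priority and blob here.
                worker.updateJob(job.id, 'available')
            }
            await worker.flushJob(job.id) // persist the state transition
        }
    })
}
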
status.error('🦔', `[HogExecutor] Error during fetch`, { error: String(err) }) @@ -135,6 +134,7 @@ export class FetchExecutor { ...invocation, queue: 'hog', queueParameters: resParams, + queueBlob: Buffer.from(responseBody), }, finished: false, logs: [], diff --git a/plugin-server/src/cdp/hog-executor.ts b/plugin-server/src/cdp/hog-executor.ts index 382f6b3fc3549..28bad8e38099a 100644 --- a/plugin-server/src/cdp/hog-executor.ts +++ b/plugin-server/src/cdp/hog-executor.ts @@ -14,7 +14,7 @@ import { HogFunctionQueueParametersFetchResponse, HogFunctionType, } from './types' -import { convertToHogFunctionFilterGlobal } from './utils' +import { convertToHogFunctionFilterGlobal, queueBlobToString } from './utils' const MAX_ASYNC_STEPS = 2 const MAX_HOG_LOGS = 10 @@ -153,25 +153,33 @@ export class HogExecutor { try { // If the queueParameter is set then we have an expected format that we want to parse and add to the stack if (invocation.queueParameters) { + // NOTE: This is all based around the only response type being fetch currently const { logs = [], response = null, error, timings = [], } = invocation.queueParameters as HogFunctionQueueParametersFetchResponse + let responseBody: any = undefined + if (response) { + // Convert from buffer to string + responseBody = queueBlobToString(invocation.queueBlob) + } // Reset the queue parameters to be sure invocation.queue = 'hog' invocation.queueParameters = undefined + invocation.queueBlob = undefined + + const status = typeof response?.status === 'number' ? response.status : 503 // Special handling for fetch - // TODO: Would be good to have a dedicated value in the fetch response for the status code - if (response?.status && response.status >= 400) { + if (status >= 400) { // Generic warn log for bad status codes logs.push({ level: 'warn', timestamp: DateTime.now(), - message: `Fetch returned bad status: ${response.status}`, + message: `Fetch returned bad status: ${status}`, }) } @@ -183,16 +191,22 @@ export class HogExecutor { throw new Error(error) } - if (typeof response?.body === 'string') { + if (typeof responseBody === 'string') { try { - response.body = JSON.parse(response.body) + responseBody = JSON.parse(responseBody) } catch (e) { // pass - if it isn't json we just pass it on } } + // Finally we create the response object as the VM expects + const fetchResponse = { + status, + body: responseBody, + } + // Add the response to the stack to continue execution - invocation.vmState!.stack.push(response) + invocation.vmState!.stack.push(fetchResponse) invocation.timings.push(...timings) result.logs = [...logs, ...result.logs] } @@ -327,18 +341,22 @@ export class HogExecutor { const headers = fetchOptions?.headers || { 'Content-Type': 'application/json', } - let body = fetchOptions?.body // Modify the body to ensure it is a string (we allow Hog to send an object to keep things simple) - body = body ? (typeof body === 'string' ? body : JSON.stringify(body)) : body + const body: string | undefined = fetchOptions?.body + ? typeof fetchOptions.body === 'string' + ? fetchOptions.body + : JSON.stringify(fetchOptions.body) + : fetchOptions?.body result.invocation.queue = 'fetch' result.invocation.queueParameters = { url, method, headers, - body, + return_queue: 'hog', } - + // The payload is always blob encoded + result.invocation.queueBlob = body ? 
Buffer.from(body) : undefined break default: throw new Error(`Unknown async function '${execRes.asyncFunctionName}'`) @@ -366,6 +384,7 @@ export class HogExecutor { } } catch (err) { result.error = err.message + result.finished = true // Explicitly set to true to prevent infinite loops status.error( '🦔', `[HogExecutor] Error executing function ${invocation.hogFunction.id} - ${invocation.hogFunction.name}`, diff --git a/plugin-server/src/cdp/hog-function-manager.ts b/plugin-server/src/cdp/hog-function-manager.ts index d356e6d66ce10..94803e209f25e 100644 --- a/plugin-server/src/cdp/hog-function-manager.ts +++ b/plugin-server/src/cdp/hog-function-manager.ts @@ -95,6 +95,7 @@ export class HogFunctionManager { if (!this.ready) { throw new Error('HogFunctionManager is not ready! Run HogFunctionManager.start() before this') } + return this.cache.functions[id] } @@ -102,6 +103,7 @@ export class HogFunctionManager { if (!this.ready) { throw new Error('HogFunctionManager is not ready! Run HogFunctionManager.start() before this') } + const fn = this.cache.functions[hogFunctionId] if (fn?.team_id === teamId) { return fn diff --git a/plugin-server/src/cdp/types.ts b/plugin-server/src/cdp/types.ts index 3c4eed47c6d41..3ca31657cfb74 100644 --- a/plugin-server/src/cdp/types.ts +++ b/plugin-server/src/cdp/types.ts @@ -47,20 +47,6 @@ export interface HogFunctionFilters { bytecode?: HogBytecode } -// We have a "parsed" clickhous event type to make it easier to work with calls from kafka as well as those from the frontend -export interface ParsedClickhouseEvent { - uuid: string - event: string - team_id: number - distinct_id: string - person_id?: string - timestamp: string - created_at: string - properties: Record - person_created_at?: string - person_properties: Record -} - export type GroupType = { id: string // the "key" of the group type: string @@ -105,6 +91,10 @@ export type HogFunctionFilterGlobals = { event: string timestamp: string elements_chain: string + elements_chain_href: string + elements_chain_texts: string[] + elements_chain_ids: string[] + elements_chain_elements: string[] properties: Record person?: { @@ -156,8 +146,9 @@ export interface HogFunctionTiming { export type HogFunctionQueueParametersFetchRequest = { url: string method: string - body: string - headers: Record + return_queue: string + max_tries?: number + headers?: Record } export type HogFunctionQueueParametersFetchResponse = { @@ -166,7 +157,6 @@ export type HogFunctionQueueParametersFetchResponse = { /** The data to be passed to the Hog function from the response */ response?: { status: number - body: any } | null timings?: HogFunctionTiming[] logs?: LogEntry[] @@ -181,8 +171,10 @@ export type HogFunctionInvocation = { globals: HogFunctionInvocationGlobals teamId: Team['id'] hogFunction: HogFunctionType + priority: number queue: 'hog' | 'fetch' queueParameters?: HogFunctionInvocationQueueParameters + queueBlob?: Uint8Array // The current vmstate (set if the invocation is paused) vmState?: VMState timings: HogFunctionTiming[] diff --git a/plugin-server/src/cdp/utils.ts b/plugin-server/src/cdp/utils.ts index 158814718dc0a..c8e6cd25be2fe 100644 --- a/plugin-server/src/cdp/utils.ts +++ b/plugin-server/src/cdp/utils.ts @@ -1,6 +1,7 @@ // NOTE: PostIngestionEvent is our context event - it should never be sent directly to an output, but rather transformed into a lightweight schema import { DateTime } from 'luxon' +import RE2 from 're2' import { gunzip, gzip } from 'zlib' import { RawClickHouseEvent, Team, TimestampFormat } from 
'../types' @@ -15,7 +16,6 @@ import { HogFunctionInvocationSerialized, HogFunctionLogEntrySerialized, HogFunctionType, - ParsedClickhouseEvent, } from './types' export const PERSON_DEFAULT_DISPLAY_NAME_PROPERTIES = [ @@ -39,26 +39,6 @@ const getPersonDisplayName = (team: Team, distinctId: string, properties: Record return (customIdentifier || distinctId)?.trim() } -export function convertToParsedClickhouseEvent(event: RawClickHouseEvent): ParsedClickhouseEvent { - const properties = event.properties ? JSON.parse(event.properties) : {} - if (event.elements_chain) { - properties['$elements_chain'] = event.elements_chain - } - - return { - uuid: event.uuid, - event: event.event, - team_id: event.team_id, - distinct_id: event.distinct_id, - person_id: event.person_id, - timestamp: clickHouseTimestampToISO(event.timestamp), - created_at: clickHouseTimestampToISO(event.created_at), - properties: properties, - person_created_at: event.person_created_at ? clickHouseTimestampToISO(event.person_created_at) : undefined, - person_properties: event.person_properties ? JSON.parse(event.person_properties) : {}, - } -} - // that we can keep to as a contract export function convertToHogFunctionInvocationGlobals( event: RawClickHouseEvent, @@ -108,6 +88,46 @@ export function convertToHogFunctionInvocationGlobals( return context } +function getElementsChainHref(elementsChain: string): string { + // Adapted from SQL: extract(elements_chain, '(?::|\")href="(.*?)"'), + const hrefRegex = new RE2(/(?::|")href="(.*?)"/) + const hrefMatch = hrefRegex.exec(elementsChain) + return hrefMatch ? hrefMatch[1] : '' +} + +function getElementsChainTexts(elementsChain: string): string[] { + // Adapted from SQL: arrayDistinct(extractAll(elements_chain, '(?::|\")text="(.*?)"')), + const textRegex = new RE2(/(?::|")text="(.*?)"/g) + const textMatches = new Set() + let textMatch + while ((textMatch = textRegex.exec(elementsChain)) !== null) { + textMatches.add(textMatch[1]) + } + return Array.from(textMatches) +} + +function getElementsChainIds(elementsChain: string): string[] { + // Adapted from SQL: arrayDistinct(extractAll(elements_chain, '(?::|\")attr_id="(.*?)"')), + const idRegex = new RE2(/(?::|")attr_id="(.*?)"/g) + const idMatches = new Set() + let idMatch + while ((idMatch = idRegex.exec(elementsChain)) !== null) { + idMatches.add(idMatch[1]) + } + return Array.from(idMatches) +} + +function getElementsChainElements(elementsChain: string): string[] { + // Adapted from SQL: arrayDistinct(extractAll(elements_chain, '(?:^|;)(a|button|form|input|select|textarea|label)(?:\\.|$|:)')) + const elementRegex = new RE2(/(?:^|;)(a|button|form|input|select|textarea|label)(?:\.|$|:)/g) + const elementMatches = new Set() + let elementMatch + while ((elementMatch = elementRegex.exec(elementsChain)) !== null) { + elementMatches.add(elementMatch[1]) + } + return Array.from(elementMatches) +} + export function convertToHogFunctionFilterGlobal(globals: HogFunctionInvocationGlobals): HogFunctionFilterGlobals { const groups: Record = {} @@ -117,14 +137,53 @@ export function convertToHogFunctionFilterGlobal(globals: HogFunctionInvocationG } } - return { + const elementsChain = globals.event.properties['$elements_chain'] + const response = { event: globals.event.name, - elements_chain: globals.event.properties['$elements_chain'], + elements_chain: elementsChain, + elements_chain_href: '', + elements_chain_texts: [] as string[], + elements_chain_ids: [] as string[], + elements_chain_elements: [] as string[], timestamp: globals.event.timestamp, 
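These extractors replicate the elements_chain_* materialized-column SQL, so a worked example is useful for sanity-checking the regex shape. The helpers above are module-private; this standalone sketch inlines one of them, with plain RegExp standing in for RE2 (the production code uses RE2 for its linear-time matching guarantees) and an invented chain string:

const elementsChain = 'a.link:href="/pricing"text="Pricing";button.cta:text="Sign up"attr_id="signup-btn"'

// Same exec-loop shape as getElementsChainTexts above.
const textRegex = /(?::|")text="(.*?)"/g
const texts = new Set<string>()
let match: RegExpExecArray | null
while ((match = textRegex.exec(elementsChain)) !== null) {
    texts.add(match[1])
}

Array.from(texts) // -> ['Pricing', 'Sign up']
// The href, attr_id and tag-name helpers follow the same pattern, e.g.
// /(?::|")href="(.*?)"/ captures '/pricing' from this chain.
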
properties: globals.event.properties, person: globals.person ? { properties: globals.person.properties } : undefined, ...groups, + } satisfies HogFunctionFilterGlobals + + // The elements_chain_* fields are stored as materialized columns in ClickHouse. + // We use the same formula to calculate them here. + if (elementsChain) { + const cache: Record = {} + Object.defineProperties(response, { + elements_chain_href: { + get: () => { + cache.elements_chain_href ??= getElementsChainHref(elementsChain) + return cache.elements_chain_href + }, + }, + elements_chain_texts: { + get: () => { + cache.elements_chain_texts ??= getElementsChainTexts(elementsChain) + return cache.elements_chain_texts + }, + }, + elements_chain_ids: { + get: () => { + cache.elements_chain_ids ??= getElementsChainIds(elementsChain) + return cache.elements_chain_ids + }, + }, + elements_chain_elements: { + get: () => { + cache.elements_chain_elements ??= getElementsChainElements(elementsChain) + return cache.elements_chain_elements + }, + }, + }) } + + return response } export const convertToCaptureEvent = (event: HogFunctionCapturedEvent, team: Team): any => { @@ -222,16 +281,25 @@ export function createInvocation( teamId: hogFunction.team_id, hogFunction, queue: 'hog', + priority: 1, timings: [], } } -export function serializeInvocation(invocation: HogFunctionInvocation): HogFunctionInvocationSerialized { +export function serializeHogFunctionInvocation(invocation: HogFunctionInvocation): HogFunctionInvocationSerialized { const serializedInvocation: HogFunctionInvocationSerialized = { ...invocation, hogFunctionId: invocation.hogFunction.id, + // We clear the params as they are never used in the serialized form + queueParameters: undefined, + queueBlob: undefined, } delete (serializedInvocation as any).hogFunction - return invocation + + return serializedInvocation +} + +export function queueBlobToString(blob?: HogFunctionInvocation['queueBlob']): string | undefined { + return blob ? Buffer.from(blob).toString('utf-8') : undefined } diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index afa2ba1d72fe3..a0c64393c4352 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -183,14 +183,20 @@ export function getDefaultConfig(): PluginsServerConfig { CDP_WATCHER_REFILL_RATE: 10, CDP_WATCHER_DISABLED_TEMPORARY_MAX_COUNT: 3, CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS: '', - CDP_ASYNC_FUNCTIONS_CYCLOTRON_TEAMS: '', + CDP_CYCLOTRON_ENABLED_TEAMS: '', CDP_REDIS_PASSWORD: '', CDP_EVENT_PROCESSOR_EXECUTE_FIRST_STEP: true, CDP_REDIS_HOST: '', CDP_REDIS_PORT: 6479, + CDP_CYCLOTRON_BATCH_DELAY_MS: 50, + CDP_CYCLOTRON_BATCH_SIZE: 500, // Cyclotron - CYCLOTRON_DATABASE_URL: '', + CYCLOTRON_DATABASE_URL: isTestEnv() + ? 'postgres://posthog:posthog@localhost:5432/test_cyclotron' + : isDevEnv() + ? 
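The Object.defineProperties block above is a lazy-memoization pattern: each elements_chain_* field is computed only if a filter actually reads it, and at most once per event, which matters because the regex extraction is comparatively expensive. The same pattern in isolation (names here are illustrative):

function defineLazyField<T extends object>(target: T, key: string, compute: () => unknown): T {
    const cache: Record<string, unknown> = {}
    Object.defineProperty(target, key, {
        get: () => {
            // ??= only invokes compute() on the first read; later reads hit the cache.
            cache[key] ??= compute()
            return cache[key]
        },
    })
    return target
}

const row = defineLazyField({ event: '$autocapture' } as Record<string, unknown>, 'expensive', () => {
    console.log('computed once')
    return 42
})
row.expensive // logs 'computed once', then returns 42
row.expensive // served from the cache, no log
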
'postgres://posthog:posthog@localhost:5432/cyclotron' + : '', } } diff --git a/plugin-server/src/main/ingestion-queues/session-recording/utils.ts b/plugin-server/src/main/ingestion-queues/session-recording/utils.ts index 94b930625b41d..16b336d8f952c 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/utils.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/utils.ts @@ -324,21 +324,21 @@ export const parseKafkaBatch = async ( continue } - const session_key = `${parsedMessage.team_id}:${parsedMessage.session_id}` - const existingMessage = parsedSessions.get(session_key) + const sessionKey = `${parsedMessage.team_id}:${parsedMessage.session_id}` + const existingMessage = parsedSessions.get(sessionKey) + if (existingMessage === undefined) { // First message for this session key, store it and continue looping for more - parsedSessions.set(session_key, parsedMessage) + parsedSessions.set(sessionKey, parsedMessage) continue } for (const [windowId, events] of Object.entries(parsedMessage.eventsByWindowId)) { - if (existingMessage.eventsByWindowId[windowId]) { - existingMessage.eventsByWindowId[windowId].push(...events) - } else { - existingMessage.eventsByWindowId[windowId] = events - } + existingMessage.eventsByWindowId[windowId] = (existingMessage.eventsByWindowId[windowId] || []).concat( + events + ) } + existingMessage.metadata.rawSize += parsedMessage.metadata.rawSize // Update the events ranges diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts index cafdc0451806d..ff1f46b82d338 100644 --- a/plugin-server/src/main/pluginsServer.ts +++ b/plugin-server/src/main/pluginsServer.ts @@ -10,7 +10,12 @@ import v8Profiler from 'v8-profiler-next' import { getPluginServerCapabilities } from '../capabilities' import { CdpApi } from '../cdp/cdp-api' -import { CdpFunctionCallbackConsumer, CdpProcessedEventsConsumer } from '../cdp/cdp-consumers' +import { + CdpCyclotronWorker, + CdpCyclotronWorkerFetch, + CdpFunctionCallbackConsumer, + CdpProcessedEventsConsumer, +} from '../cdp/cdp-consumers' import { defaultConfig, sessionRecordingConsumerConfig } from '../config/config' import { Hub, PluginServerCapabilities, PluginServerService, PluginsServerConfig } from '../types' import { closeHub, createHub, createKafkaClient, createKafkaProducerWrapper } from '../utils/db/hub' @@ -458,16 +463,23 @@ export async function startPluginsServer( } } - // if (capabilities.cdpCyclotronWorker) { - // ;[hub, closeHub] = hub ? [hub, closeHub] : await createHub(serverConfig, capabilities) - // if (hub.CYCLOTRON_DATABASE_URL) { - // const worker = new CdpCyclotronWorker(hub) - // await worker.start() - // } else { - // // This is a temporary solution until we *require* Cyclotron to be configured. 
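The parseKafkaBatch change above is behavior-preserving: the append-or-create branches for a window's event list collapse into one concat expression. In isolation, with a simplified event type:

type EventsByWindowId = Record<string, number[]>

// Merge an incoming message's windows into the session entry we already hold.
function mergeWindows(existing: EventsByWindowId, incoming: EventsByWindowId): EventsByWindowId {
    for (const [windowId, events] of Object.entries(incoming)) {
        existing[windowId] = (existing[windowId] || []).concat(events)
    }
    return existing
}

mergeWindows({ w1: [1, 2] }, { w1: [3], w2: [4] })
// -> { w1: [1, 2, 3], w2: [4] }
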
- // status.warn('💥', 'CYCLOTRON_DATABASE_URL is not set, not running Cyclotron worker') - // } - // } + if (capabilities.cdpCyclotronWorker) { + const hub = await setupHub() + + if (!hub.CYCLOTRON_DATABASE_URL) { + status.error('💥', 'Cyclotron database URL not set.') + } else { + const worker = new CdpCyclotronWorker(hub) + await worker.start() + services.push(worker.service) + + if (process.env.EXPERIMENTAL_CDP_FETCH_WORKER) { + const workerFetch = new CdpCyclotronWorkerFetch(hub) + await workerFetch.start() + services.push(workerFetch.service) + } + } + } if (capabilities.http) { const app = setupCommonRoutes(services) diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 58253a210abd3..90bea28edc33d 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -113,7 +113,9 @@ export type CdpConfig = { CDP_WATCHER_DISABLED_TEMPORARY_TTL: number // How long a function should be temporarily disabled for CDP_WATCHER_DISABLED_TEMPORARY_MAX_COUNT: number // How many times a function can be disabled before it is disabled permanently CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS: string - CDP_ASYNC_FUNCTIONS_CYCLOTRON_TEAMS: string + CDP_CYCLOTRON_ENABLED_TEAMS: string + CDP_CYCLOTRON_BATCH_SIZE: number + CDP_CYCLOTRON_BATCH_DELAY_MS: number CDP_REDIS_HOST: string CDP_REDIS_PORT: number CDP_REDIS_PASSWORD: string diff --git a/plugin-server/src/utils/status.ts b/plugin-server/src/utils/status.ts index 385b97739685e..0b6b8f26ca1c5 100644 --- a/plugin-server/src/utils/status.ts +++ b/plugin-server/src/utils/status.ts @@ -15,7 +15,7 @@ export interface StatusBlueprint { export class Status implements StatusBlueprint { mode?: string - logger: pino.Logger + private logger?: pino.Logger prompt: string transport: any @@ -59,11 +59,23 @@ export class Status implements StatusBlueprint { close() { this.transport?.end() + this.logger = undefined } buildMethod(type: keyof StatusBlueprint): StatusMethod { return (icon: string, message: string, extra: object) => { const logMessage = `[${this.prompt}] ${icon} ${message}` + + if (!this.logger) { + if (isProdEnv()) { + // This can throw on tests if the logger is closed. We don't really want tests to be bothered with this. + throw new Error(`Logger has been closed! Cannot log: ${logMessage}`) + } + console.log( + `Logger has been closed! Cannot log: ${logMessage}. 
Logging to console instead due to non-prod env.` + ) + return + } if (extra instanceof Object) { this.logger[type]({ ...extra, msg: logMessage }) } else { diff --git a/plugin-server/src/worker/ingestion/process-event.ts b/plugin-server/src/worker/ingestion/process-event.ts index 1c5499879e3c7..f03ca9d85fb04 100644 --- a/plugin-server/src/worker/ingestion/process-event.ts +++ b/plugin-server/src/worker/ingestion/process-event.ts @@ -26,7 +26,7 @@ import { castTimestampOrNow } from '../../utils/utils' import { GroupTypeManager, MAX_GROUP_TYPES_PER_TEAM } from './group-type-manager' import { addGroupProperties } from './groups' import { upsertGroup } from './properties-updater' -import { PropertyDefinitionsManager } from './property-definitions-manager' +import { GroupAndFirstEventManager } from './property-definitions-manager' import { TeamManager } from './team-manager' import { captureIngestionWarning } from './utils' @@ -49,7 +49,7 @@ export class EventsProcessor { kafkaProducer: KafkaProducerWrapper teamManager: TeamManager groupTypeManager: GroupTypeManager - propertyDefinitionsManager: PropertyDefinitionsManager + groupAndFirstEventManager: GroupAndFirstEventManager constructor(pluginsServer: Hub) { this.pluginsServer = pluginsServer @@ -58,11 +58,10 @@ export class EventsProcessor { this.kafkaProducer = pluginsServer.kafkaProducer this.teamManager = pluginsServer.teamManager this.groupTypeManager = new GroupTypeManager(pluginsServer.postgres, this.teamManager, pluginsServer.SITE_URL) - this.propertyDefinitionsManager = new PropertyDefinitionsManager( + this.groupAndFirstEventManager = new GroupAndFirstEventManager( this.teamManager, this.groupTypeManager, - pluginsServer.db, - pluginsServer + pluginsServer.db ) } @@ -156,7 +155,7 @@ export class EventsProcessor { if (this.pluginsServer.SKIP_UPDATE_EVENT_AND_PROPERTIES_STEP === false) { try { - await this.propertyDefinitionsManager.updateEventNamesAndProperties(team.id, event, properties) + await this.groupAndFirstEventManager.updateGroupsAndFirstEvent(team.id, event, properties) } catch (err) { Sentry.captureException(err, { tags: { team_id: team.id } }) status.warn('⚠️', 'Failed to update property definitions for an event', { diff --git a/plugin-server/src/worker/ingestion/property-definitions-auto-discovery.ts b/plugin-server/src/worker/ingestion/property-definitions-auto-discovery.ts deleted file mode 100644 index bcffcbbc3c89f..0000000000000 --- a/plugin-server/src/worker/ingestion/property-definitions-auto-discovery.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { DateTimePropertyTypeFormat, PropertyType, UnixTimestampPropertyTypeFormat } from '../../types' - -// magic copied from https://stackoverflow.com/a/54930905 -// allows candidate to be typed as any - -export const unixTimestampPropertyTypeFormatPatterns: Record = { - UNIX_TIMESTAMP: /^\d{10}(\.\d*)?$/, - UNIX_TIMESTAMP_MILLISECONDS: /^\d{13}$/, -} - -export const dateTimePropertyTypeFormatPatterns: Record = { - DATE: /^\d{4}-\d{2}-\d{2}$/, - ISO8601_DATE: /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}:?(?:\d{2})?)$/i, - FULL_DATE: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/, - FULL_DATE_INCREASING: /^\d{2}-\d{2}-\d{4} \d{2}:\d{2}:\d{2}$/, - WITH_SLASHES: /^\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}$/, - WITH_SLASHES_INCREASING: /^\d{2}\/\d{2}\/\d{4} \d{2}:\d{2}:\d{2}$/, - // see https://datatracker.ietf.org/doc/html/rfc2822#section-3.3 - RFC_822: - /^((mon|tue|wed|thu|fri|sat|sun), )?\d{2} (jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec) \d{4} \d{2}:\d{2}:\d{2}( 
[+|-]\d{4})?$/i, -} - -export const detectPropertyDefinitionTypes = (value: unknown, key: string): PropertyType | null => { - let propertyType: PropertyType | null = null - - /** - * Auto-detecting unix timestamps is tricky. It's hard to know what is a big number or ID and what is a timestamp - * - * This tries to detect the most likely cases. - * - * * Numbers or Numeric Strings - * * That are either ten digits (seconds since unix epoch), or 13 digits (milliseconds since unix epoch), - * * or ten digits with numbers after the decimal place (whole seconds since unix epoch and fractions of a second) - * * where the property key includes either time or timestamp - * - * ten digits of seconds since epoch runs between Sep 09 2001 and Nov 20th 2286 - * - * These are some representations from a variety of programming languages - * - * Python - * >>> datetime.now().timestamp() - * 1641477529.234715 - * - * Ruby - * puts Time.now.to_i - * 1641477692 - * - * Node JS - * console.log(Date.now()) - * 1641477753371 - * - * Java - * System.out.println(LocalDateTime.now().toEpochSecond(ZoneOffset.UTC)); - * 1641478115 - * - * SQL Lite - * select strftime('%s', 'now') - * 1641478347 - */ - const detectUnixTimestamps = () => { - Object.values(unixTimestampPropertyTypeFormatPatterns).find((pattern) => { - if ( - (key.toLowerCase().includes('timestamp') || key.toLowerCase().includes('time')) && - String(value).match(pattern) - ) { - propertyType = PropertyType.DateTime - return true - } - }) - } - - if (/^utm_/i.test(key)) { - // utm_ prefixed properties should always be detected as strings. - // Sometimes the first value sent looks like a number, event though - // subsequent values are not. See - // https://github.com/PostHog/posthog/issues/12529 for more context. - return PropertyType.String - } - - if (key.indexOf('$feature/') === 0) { - // $feature/ prefixed properties should always be detected as strings. - // These are feature flag values, and can be boolean or string. - // Sometimes the first value sent is boolean (because flag isn't enabled) while - // subsequent values are not. We don't want this to be misunderstood as a boolean. - return PropertyType.String - } - - if (key === '$feature_flag_response') { - // $feature_flag_response properties should always be detected as strings. - // These are feature flag values, and can be boolean or string. - // Sometimes the first value sent is boolean (because flag isn't enabled) while - // subsequent values are not. We don't want this to be misunderstood as a boolean. - return PropertyType.String - } - - if (key.indexOf('$survey_response') === 0) { - // NB: $survey_responses are collected in an interesting way, where the first - // response is called `$survey_response` and subsequent responses are called - // `$survey_response_2`, `$survey_response_3`, etc. So, this check should auto-cast - // all survey responses to strings, and $survey_response properties should always be detected as strings. 
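// Editor's note: a few worked examples of this (now-deleted) detector, reconstructed from
// the rules visible in this file -- illustrative only, not part of the original source:
//
//   detectPropertyDefinitionTypes('2024-09-03T09:00:00Z', 'created_at') // -> PropertyType.DateTime (ISO8601 pattern)
//   detectPropertyDefinitionTypes(1641477753371, 'server_time')         // -> PropertyType.DateTime (13-digit ms epoch, key contains 'time')
//   detectPropertyDefinitionTypes(299.99, 'price')                      // -> PropertyType.Numeric
//   detectPropertyDefinitionTypes('true', 'opt_in')                     // -> PropertyType.Boolean
//   detectPropertyDefinitionTypes('42', 'utm_content')                  // -> PropertyType.String (utm_ prefix override)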
- return PropertyType.String - } - - if (typeof value === 'number') { - propertyType = PropertyType.Numeric - - detectUnixTimestamps() - } - - if (typeof value === 'string') { - propertyType = PropertyType.String - - Object.values(dateTimePropertyTypeFormatPatterns).find((pattern) => { - if (value.match(pattern)) { - propertyType = PropertyType.DateTime - return true - } - }) - } - - if ( - typeof value === 'boolean' || - (typeof value === 'string' && ['true', 'false'].includes(value.trim().toLowerCase())) - ) { - propertyType = PropertyType.Boolean - } - - return propertyType -} diff --git a/plugin-server/src/worker/ingestion/property-definitions-cache.ts b/plugin-server/src/worker/ingestion/property-definitions-cache.ts deleted file mode 100644 index db91dd7fc8d2a..0000000000000 --- a/plugin-server/src/worker/ingestion/property-definitions-cache.ts +++ /dev/null @@ -1,96 +0,0 @@ -import LRU from 'lru-cache' -import LRUCache from 'lru-cache' - -import { ONE_HOUR } from '../../config/constants' -import { GroupTypeIndex, PluginsServerConfig, PropertyDefinitionTypeEnum, PropertyType, TeamId } from '../../types' -import { DB } from '../../utils/db/db' -import { PostgresUse } from '../../utils/db/postgres' - -export const NULL_IN_DATABASE = Symbol('NULL_IN_DATABASE') -export const NULL_AFTER_PROPERTY_TYPE_DETECTION = Symbol('NULL_AFTER_PROPERTY_TYPE_DETECTION') - -type PropertyDefinitionsCacheValue = PropertyType | typeof NULL_IN_DATABASE | typeof NULL_AFTER_PROPERTY_TYPE_DETECTION - -/** - * During event ingestion the property definitions manager attempts to auto-detect the property type and format for properties - * - * The PropertyDefinitionsCache is used to reduce the load on Postgres - * when inserting property definitions during event ingestion - * - * A property definition can be in one of several states - * - * - never seen before -> it is not in the cache and should be inserted into the database - * - in the cache and has a property type -> it never needs to be updated - * - in the cache and has null as a property type -> it might need property types inserted in postgres ('NULL_IN_DATABASE') - * - it is in the cache and has been confirmed as having no property type -> it never needs to be updated ('NULL_AFTER_PROPERTY_TYPE_DETECTION') - */ -export class PropertyDefinitionsCache { - readonly propertyDefinitionsCache: Map> - private readonly lruCacheSize: number - - constructor(serverConfig: PluginsServerConfig) { - this.lruCacheSize = serverConfig.EVENT_PROPERTY_LRU_SIZE - this.propertyDefinitionsCache = new Map() - } - - async initialize(teamId: number, db: DB): Promise { - const properties = await db.postgres.query( - PostgresUse.COMMON_WRITE, - 'SELECT name, property_type, type, group_type_index FROM posthog_propertydefinition WHERE team_id = $1', - [teamId], - 'fetchPropertyDefinitions' - ) - - const teamPropertyDefinitionsCache = new LRU({ - max: this.lruCacheSize, // keep in memory the last 10k property definitions we have seen - maxAge: ONE_HOUR * 24, // cache up to 24h - updateAgeOnGet: true, - }) - - for (const item of properties.rows) { - teamPropertyDefinitionsCache.set( - this.key(item.name, item.type, item.group_type_index), - item.property_type ?? 
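// Editor's note: NULL_IN_DATABASE is a Symbol, so the `??` on the surrounding line only
// falls back to it when the row's property_type is actually null/undefined. A self-contained
// sketch of the sentinel pattern (names here are illustrative, not from this file):
/*
const MISSING = Symbol('MISSING')
const cache = new Map<string, string | typeof MISSING>()
const row: { property_type: string | null } = { property_type: null }
cache.set('plan', row.property_type ?? MISSING) // null in the DB -> remember to re-detect later
const cached = cache.get('plan')
const needsDetection = cached === undefined || cached === MISSING // true
*/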
NULL_IN_DATABASE - ) - } - - this.propertyDefinitionsCache.set(teamId, teamPropertyDefinitionsCache) - } - - has(teamId: number): boolean { - return this.propertyDefinitionsCache.has(teamId) - } - - shouldUpdate( - teamId: number, - property: string, - type: PropertyDefinitionTypeEnum, - groupTypeIndex: GroupTypeIndex | null - ): boolean { - const teamCache = this.propertyDefinitionsCache.get(teamId) - const value = teamCache?.get(this.key(property, type, groupTypeIndex)) - return value === undefined || value === NULL_IN_DATABASE - } - - set( - teamId: number, - property: string, - type: PropertyDefinitionTypeEnum, - groupTypeIndex: GroupTypeIndex | null, - detectedPropertyType: PropertyType | null - ): void { - const teamCache = this.propertyDefinitionsCache.get(teamId) - teamCache?.set( - this.key(property, type, groupTypeIndex), - detectedPropertyType ?? NULL_AFTER_PROPERTY_TYPE_DETECTION - ) - } - - get(teamId: number): LRUCache | undefined { - return this.propertyDefinitionsCache.get(teamId) - } - - private key(property: string, type: PropertyDefinitionTypeEnum, groupTypeIndex: GroupTypeIndex | null): string { - return `${type}${groupTypeIndex ?? ''}${property}` - } -} diff --git a/plugin-server/src/worker/ingestion/property-definitions-manager.ts b/plugin-server/src/worker/ingestion/property-definitions-manager.ts index fd99af328958d..98920ca7dab7e 100644 --- a/plugin-server/src/worker/ingestion/property-definitions-manager.ts +++ b/plugin-server/src/worker/ingestion/property-definitions-manager.ts @@ -1,41 +1,14 @@ import { Properties } from '@posthog/plugin-scaffold' -import LRU from 'lru-cache' -import { DateTime } from 'luxon' import { Summary } from 'prom-client' -import { ONE_HOUR } from '../../config/constants' -import { - GroupTypeIndex, - PluginsServerConfig, - PropertyDefinitionTypeEnum, - PropertyType, - Team, - TeamId, -} from '../../types' +import { Team } from '../../types' import { DB } from '../../utils/db/db' -import { PostgresUse } from '../../utils/db/postgres' -import { sanitizeString, timeoutGuard } from '../../utils/db/utils' -import { status } from '../../utils/status' -import { UUIDT } from '../../utils/utils' +import { timeoutGuard } from '../../utils/db/utils' import { GroupTypeManager } from './group-type-manager' -import { detectPropertyDefinitionTypes } from './property-definitions-auto-discovery' -import { PropertyDefinitionsCache } from './property-definitions-cache' import { TeamManager } from './team-manager' // for e.g. internal events we don't want to be available for users in the UI const EVENTS_WITHOUT_EVENT_DEFINITION = ['$$plugin_metrics'] -// These are used internally for manipulating person/group properties -const NOT_SYNCED_PROPERTIES = new Set([ - '$set', - '$set_once', - '$unset', - '$group_0', - '$group_1', - '$group_2', - '$group_3', - '$group_4', - '$groups', -]) const updateEventNamesAndPropertiesMsSummary = new Summary({ name: 'update_event_names_and_properties_ms', @@ -43,78 +16,22 @@ const updateEventNamesAndPropertiesMsSummary = new Summary({ percentiles: [0.5, 0.9, 0.95, 0.99], }) -type PartialPropertyDefinition = { - key: string - type: PropertyDefinitionTypeEnum - value: any - groupTypeIndex: GroupTypeIndex | null -} - -// See EventProperty and EventDefinition in Django. They have CharFields with a `max_length` we -// need to respect. Note that `character varying` columns deal in characters and not in bytes. 
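// Editor's note: a runnable illustration of the code-unit vs character distinction the
// comment above describes (plain TypeScript, no assumptions beyond the standard library):
/*
const s = '𝒳'.repeat(300) // astral-plane characters are stored as surrogate pairs
console.log(s.length) // 600 UTF-16 code units
console.log(Array.from(s).length) // 300 Unicode characters -- still fits a 400-char column
*/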
-const DJANGO_EVENT_MAX_CHARFIELD_LENGTH = 400 - -function willFitInPostgresColumn(str: string, maxLength = DJANGO_EVENT_MAX_CHARFIELD_LENGTH) { - if (str.length <= maxLength / 2) { - // If it's half or less the length, then it will fit even if every character contains - // a surrogate pair. - return true - } - - // Gives us a correct unicode character count, handling surrogate pairs. - const unicodeCharacters = Array.from(str) - return unicodeCharacters.length <= maxLength -} - -export class PropertyDefinitionsManager { +export class GroupAndFirstEventManager { db: DB teamManager: TeamManager groupTypeManager: GroupTypeManager - eventDefinitionsCache: LRU> - eventPropertiesCache: LRU> // Map> - eventLastSeenCache: LRU // key: JSON.stringify([team_id, event]); value: parseInt(YYYYMMDD) - propertyDefinitionsCache: PropertyDefinitionsCache - private readonly lruCacheSize: number - constructor( - teamManager: TeamManager, - groupTypeManager: GroupTypeManager, - db: DB, - serverConfig: PluginsServerConfig - ) { + constructor(teamManager: TeamManager, groupTypeManager: GroupTypeManager, db: DB) { this.db = db this.teamManager = teamManager this.groupTypeManager = groupTypeManager - this.lruCacheSize = serverConfig.EVENT_PROPERTY_LRU_SIZE - - this.eventDefinitionsCache = new LRU({ - max: this.lruCacheSize, - maxAge: ONE_HOUR * 24, - updateAgeOnGet: true, - }) - this.eventPropertiesCache = new LRU({ - max: this.lruCacheSize, // keep in memory the last 10k team+event combos we have seen - maxAge: ONE_HOUR * 24, // cache up to 24h - updateAgeOnGet: true, - }) - this.eventLastSeenCache = new LRU({ - max: this.lruCacheSize, // keep in memory the last 10k team+event combos we have seen - maxAge: ONE_HOUR * 24, // cache up to 24h - updateAgeOnGet: true, - }) - this.propertyDefinitionsCache = new PropertyDefinitionsCache(serverConfig) } - public async updateEventNamesAndProperties(teamId: number, event: string, properties: Properties): Promise { + public async updateGroupsAndFirstEvent(teamId: number, event: string, properties: Properties): Promise { if (EVENTS_WITHOUT_EVENT_DEFINITION.includes(event)) { return } - event = sanitizeString(event) - if (!willFitInPostgresColumn(event)) { - return - } - const timer = new Date() const timeout = timeoutGuard( 'Still running "updateEventNamesAndProperties". 
Timeout warning after 30 sec!', @@ -129,211 +46,25 @@ export class PropertyDefinitionsManager { if (!team) { return } - await this.cacheEventNamesAndProperties(team.id, event) - await Promise.all([ - this.syncEventDefinitions(team, event), - this.syncEventProperties(team, event, properties), - this.syncPropertyDefinitions(team, event, properties), - this.teamManager.setTeamIngestedEvent(team, properties), - ]) - } finally { - clearTimeout(timeout) - updateEventNamesAndPropertiesMsSummary.observe(Date.now() - timer.valueOf()) - } - } - - private async syncEventDefinitions(team: Team, event: string) { - const cacheKey = JSON.stringify([team.id, event]) - const cacheTime = parseInt(DateTime.now().toFormat('yyyyMMdd', { timeZone: 'UTC' })) - - if (!this.eventDefinitionsCache.get(team.id)?.has(event)) { - status.info('Inserting new event definition with last_seen_at') - this.eventLastSeenCache.set(cacheKey, cacheTime) - await this.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, last_seen_at, created_at) -VALUES ($1, $2, NULL, NULL, $3, $4, NOW()) ON CONFLICT -ON CONSTRAINT posthog_eventdefinition_team_id_name_80fa0b87_uniq DO UPDATE SET last_seen_at=$4`, - [new UUIDT().toString(), event, team.id, DateTime.now()], - 'insertEventDefinition' - ) - this.eventDefinitionsCache.get(team.id)?.add(event) - } else { - if ((this.eventLastSeenCache.get(cacheKey) ?? 0) < cacheTime) { - this.eventLastSeenCache.set(cacheKey, cacheTime) - await this.db.postgres.query( - PostgresUse.COMMON_WRITE, - `UPDATE posthog_eventdefinition SET last_seen_at=$1 WHERE team_id=$2 AND name=$3`, - [DateTime.now(), team.id, event], - 'updateEventLastSeenAt' - ) - } - } - } - - private async syncEventProperties(team: Team, event: string, properties: Properties) { - const key = JSON.stringify([team.id, event]) - let existingProperties = this.eventPropertiesCache.get(key) - const toInsert: Array<[string, string, TeamId]> = [] - if (!existingProperties) { - existingProperties = new Set() - this.eventPropertiesCache.set(key, existingProperties) - } - - for (let property of this.getPropertyKeys(properties)) { - property = sanitizeString(property) - if (!willFitInPostgresColumn(property)) { - continue - } - - if (!existingProperties.has(property)) { - existingProperties.add(property) - toInsert.push([event, property, team.id]) - } - } - - if (toInsert.length > 0) { - await this.db.postgres.bulkInsert( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_eventproperty (event, property, team_id) VALUES {VALUES} ON CONFLICT DO NOTHING`, - toInsert, - 'insertEventProperty' - ) - } - } - - private async syncPropertyDefinitions(team: Team, event: string, properties: Properties) { - const toInsert: Array< - [string, string, number, number | null, boolean, null, null, TeamId, PropertyType | null] - > = [] - for await (const definitions of this.getPropertyDefinitions(team.id, event, properties)) { - let { key } = definitions - key = sanitizeString(key) - if (!willFitInPostgresColumn(key)) { - continue - } - const { value, type, groupTypeIndex } = definitions - if (this.propertyDefinitionsCache.shouldUpdate(team.id, key, type, groupTypeIndex)) { - const propertyType = detectPropertyDefinitionTypes(value, key) - const isNumerical = propertyType == PropertyType.Numeric - this.propertyDefinitionsCache.set(team.id, key, type, groupTypeIndex, propertyType) - - toInsert.push([ - new UUIDT().toString(), - key, - type, - groupTypeIndex, - isNumerical, - null, - 
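// Editor's note: this tuple lines up positionally with the column list of the INSERT just
// below -- (id, name, type, group_type_index, is_numerical, volume_30_day,
// query_usage_30_day, team_id, property_type); the two nulls are the volume/usage columns.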
null, - team.id, - propertyType, - ]) + // We always track 1st event ingestion + const promises = [this.teamManager.setTeamIngestedEvent(team, properties)] + + // We always insert/update group-types, so if this is a group-identify event, we hit + // the group-type manager, making it insert or update as necessary. + if (event === '$groupidentify') { + const { $group_type: groupType, $group_set: groupPropertiesToSet } = properties + if (groupType != null && groupPropertiesToSet != null) { + // This "fetch" is side-effecty, it inserts a group-type and assigns an index if one isn't found + const groupPromise = this.groupTypeManager.fetchGroupTypeIndex(teamId, groupType).then(() => {}) + promises.push(groupPromise) + } } - } - - if (toInsert.length > 0) { - await this.db.postgres.bulkInsert( - PostgresUse.COMMON_WRITE, - ` - INSERT INTO posthog_propertydefinition (id, name, type, group_type_index, is_numerical, volume_30_day, query_usage_30_day, team_id, property_type) - VALUES {VALUES} - ON CONFLICT (team_id, name, type, coalesce(group_type_index, -1)) - DO UPDATE SET property_type=EXCLUDED.property_type WHERE posthog_propertydefinition.property_type IS NULL - `, - toInsert, - 'insertPropertyDefinition' - ) - } - } - - public async cacheEventNamesAndProperties(teamId: number, event: string): Promise { - let eventDefinitionsCache = this.eventDefinitionsCache.get(teamId) - if (!eventDefinitionsCache) { - const eventNames = await this.db.postgres.query( - PostgresUse.COMMON_WRITE, - 'SELECT name FROM posthog_eventdefinition WHERE team_id = $1', - [teamId], - 'fetchEventDefinitions' - ) - eventDefinitionsCache = new Set(eventNames.rows.map((r) => r.name)) - this.eventDefinitionsCache.set(teamId, eventDefinitionsCache) - } - - if (!this.propertyDefinitionsCache.has(teamId)) { - await this.propertyDefinitionsCache.initialize(teamId, this.db) - } - - const cacheKey = JSON.stringify([teamId, event]) - let properties = this.eventPropertiesCache.get(cacheKey) - if (!properties) { - properties = new Set() - this.eventPropertiesCache.set(cacheKey, properties) - // The code above and below introduces a race condition. At this point we have an empty set in the cache, - // and will be waiting for the query below to return. If at the same time, asynchronously, we start to - // process another event with the same name for this team, `syncEventProperties` above will see the empty - // cache and will start to insert (on conflict do nothing) all the properties for the event. This will - // continue until either 1) the inserts will fill up the cache, or 2) the query below returns. - // All-in-all, not the end of the world, but a slight nuisance. 
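// Editor's note: the race described above is a cache-stampede. A standard remedy -- sketched
// here as an assumption-laden alternative, not what this (now-deleted) code did -- is to
// cache the in-flight promise instead of an empty Set, so concurrent events for the same
// team+event share a single query:
/*
const inFlight = new Map<string, Promise<Set<string>>>()

function getEventProperties(cacheKey: string, fetch: () => Promise<Set<string>>): Promise<Set<string>> {
    let pending = inFlight.get(cacheKey)
    if (!pending) {
        pending = fetch() // first caller issues the query
        inFlight.set(cacheKey, pending) // later callers await the same promise
    }
    return pending
}
*/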
- - const eventProperties = await this.db.postgres.query( - PostgresUse.COMMON_WRITE, - 'SELECT property FROM posthog_eventproperty WHERE team_id = $1 AND event = $2', - [teamId, event], - 'fetchEventProperties' - ) - for (const { property } of eventProperties.rows) { - properties.add(property) - } - } - } - - private getPropertyKeys(properties: Properties): Array { - return Object.keys(properties).filter((key) => !NOT_SYNCED_PROPERTIES.has(key)) - } - - private async *getPropertyDefinitions( - teamId: number, - event: string, - properties: Properties - ): AsyncGenerator { - if (event === '$groupidentify') { - const { $group_type: groupType, $group_set: groupPropertiesToSet } = properties - if (groupType != null && groupPropertiesToSet != null) { - const groupTypeIndex = await this.groupTypeManager.fetchGroupTypeIndex(teamId, groupType) - // TODO: add further validation that group properties are of the - // expected type - yield* this.extract(groupPropertiesToSet, PropertyDefinitionTypeEnum.Group, groupTypeIndex) - } - } else { - yield* this.extract(properties, PropertyDefinitionTypeEnum.Event) - - if (properties.$set) { - yield* this.extract(properties.$set, PropertyDefinitionTypeEnum.Person) - } - if (properties.$set_once) { - yield* this.extract(properties.$set_once, PropertyDefinitionTypeEnum.Person) - } - } - } - - private *extract( - properties: Properties, - type: PropertyDefinitionTypeEnum, - groupTypeIndex: GroupTypeIndex | null = null - ): Generator { - for (const [key, value] of Object.entries(properties)) { - if (type === PropertyDefinitionTypeEnum.Event && NOT_SYNCED_PROPERTIES.has(key)) { - continue - } - yield { - key, - type, - value, - groupTypeIndex, - } + await Promise.all(promises) + } finally { + clearTimeout(timeout) + updateEventNamesAndPropertiesMsSummary.observe(Date.now() - timer.valueOf()) } } } diff --git a/plugin-server/tests/cdp/cdp-e2e.test.ts b/plugin-server/tests/cdp/cdp-e2e.test.ts new file mode 100644 index 0000000000000..b5423459e284e --- /dev/null +++ b/plugin-server/tests/cdp/cdp-e2e.test.ts @@ -0,0 +1,225 @@ +import { + CdpCyclotronWorker, + CdpCyclotronWorkerFetch, + CdpFunctionCallbackConsumer, + CdpProcessedEventsConsumer, +} from '../../src/cdp/cdp-consumers' +import { HogFunctionInvocationGlobals, HogFunctionType } from '../../src/cdp/types' +import { KAFKA_APP_METRICS_2, KAFKA_LOG_ENTRIES } from '../../src/config/kafka-topics' +import { Hub, Team } from '../../src/types' +import { closeHub, createHub } from '../../src/utils/db/hub' +import { waitForExpect } from '../helpers/expectations' +import { getFirstTeam, resetTestDatabase } from '../helpers/sql' +import { HOG_EXAMPLES, HOG_FILTERS_EXAMPLES, HOG_INPUTS_EXAMPLES } from './examples' +import { createHogExecutionGlobals, insertHogFunction as _insertHogFunction } from './fixtures' +import { createKafkaObserver, TestKafkaObserver } from './helpers/kafka-observer' + +jest.mock('../../src/utils/fetch', () => { + return { + trackedFetch: jest.fn(() => + Promise.resolve({ + status: 200, + text: () => Promise.resolve(JSON.stringify({ success: true })), + json: () => Promise.resolve({ success: true }), + }) + ), + } +}) + +const mockFetch: jest.Mock = require('../../src/utils/fetch').trackedFetch + +describe('CDP E2E', () => { + jest.setTimeout(10000) + describe.each(['kafka', 'cyclotron'])('e2e fetch call: %s', (mode) => { + let processedEventsConsumer: CdpProcessedEventsConsumer + let functionProcessor: CdpFunctionCallbackConsumer + let cyclotronWorker: CdpCyclotronWorker | undefined + let 
cyclotronFetchWorker: CdpCyclotronWorkerFetch | undefined + let hub: Hub + let team: Team + let kafkaObserver: TestKafkaObserver + let fnFetchNoFilters: HogFunctionType + let globals: HogFunctionInvocationGlobals + + const insertHogFunction = async (hogFunction: Partial) => { + const item = await _insertHogFunction(hub.postgres, team.id, hogFunction) + return item + } + + beforeEach(async () => { + await resetTestDatabase() + hub = await createHub() + team = await getFirstTeam(hub) + + fnFetchNoFilters = await insertHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.no_filters, + }) + + if (mode === 'cyclotron') { + hub.CDP_CYCLOTRON_ENABLED_TEAMS = '*' + hub.CYCLOTRON_DATABASE_URL = 'postgres://posthog:posthog@localhost:5432/test_cyclotron' + } + + kafkaObserver = await createKafkaObserver(hub, [KAFKA_APP_METRICS_2, KAFKA_LOG_ENTRIES]) + + processedEventsConsumer = new CdpProcessedEventsConsumer(hub) + await processedEventsConsumer.start() + functionProcessor = new CdpFunctionCallbackConsumer(hub) + await functionProcessor.start() + + if (mode === 'cyclotron') { + cyclotronWorker = new CdpCyclotronWorker(hub) + await cyclotronWorker.start() + cyclotronFetchWorker = new CdpCyclotronWorkerFetch(hub) + await cyclotronFetchWorker.start() + } + + globals = createHogExecutionGlobals({ + project: { + id: team.id, + } as any, + event: { + uuid: 'b3a1fe86-b10c-43cc-acaf-d208977608d0', + name: '$pageview', + properties: { + $current_url: 'https://posthog.com', + $lib_version: '1.0.0', + }, + timestamp: '2024-09-03T09:00:00Z', + } as any, + }) + + mockFetch.mockClear() + }) + + afterEach(async () => { + console.log('AfterEach', { + processedEventsConsumer, + functionProcessor, + kafkaObserver, + cyclotronWorker, + cyclotronFetchWorker, + }) + + const stoppers = [ + processedEventsConsumer?.stop().then(() => console.log('Stopped processedEventsConsumer')), + functionProcessor?.stop().then(() => console.log('Stopped functionProcessor')), + kafkaObserver?.stop().then(() => console.log('Stopped kafkaObserver')), + cyclotronWorker?.stop().then(() => console.log('Stopped cyclotronWorker')), + cyclotronFetchWorker?.stop().then(() => console.log('Stopped cyclotronFetchWorker')), + ] + + await Promise.all(stoppers) + + await closeHub(hub) + }) + + afterAll(() => { + jest.useRealTimers() + }) + + /** + * Tests here are somewhat expensive so should mostly simulate happy paths and the more e2e scenarios + */ + + it('should invoke a function in the worker loop until completed', async () => { + // NOTE: We can skip kafka as the entry point + const invocations = await processedEventsConsumer.processBatch([globals]) + expect(invocations).toHaveLength(1) + + await waitForExpect(() => { + expect(kafkaObserver.messages).toHaveLength(6) + }) + + expect(mockFetch).toHaveBeenCalledTimes(1) + + expect(mockFetch.mock.calls[0]).toMatchInlineSnapshot(` + Array [ + "https://example.com/posthog-webhook", + Object { + "body": 
"{\\"event\\":{\\"uuid\\":\\"b3a1fe86-b10c-43cc-acaf-d208977608d0\\",\\"name\\":\\"$pageview\\",\\"distinct_id\\":\\"distinct_id\\",\\"url\\":\\"http://localhost:8000/events/1\\",\\"properties\\":{\\"$current_url\\":\\"https://posthog.com\\",\\"$lib_version\\":\\"1.0.0\\"},\\"timestamp\\":\\"2024-09-03T09:00:00Z\\"},\\"groups\\":{},\\"nested\\":{\\"foo\\":\\"http://localhost:8000/events/1\\"},\\"person\\":{\\"uuid\\":\\"uuid\\",\\"name\\":\\"test\\",\\"url\\":\\"http://localhost:8000/persons/1\\",\\"properties\\":{\\"email\\":\\"test@posthog.com\\"}},\\"event_url\\":\\"http://localhost:8000/events/1-test\\"}", + "headers": Object { + "version": "v=1.0.0", + }, + "method": "POST", + "timeout": 10000, + }, + ] + `) + + const logMessages = kafkaObserver.messages.filter((m) => m.topic === KAFKA_LOG_ENTRIES) + const metricsMessages = kafkaObserver.messages.filter((m) => m.topic === KAFKA_APP_METRICS_2) + + expect(metricsMessages).toMatchObject([ + { + topic: 'clickhouse_app_metrics2_test', + value: { + app_source: 'hog_function', + app_source_id: fnFetchNoFilters.id.toString(), + count: 1, + metric_kind: 'success', + metric_name: 'succeeded', + team_id: 2, + }, + }, + ]) + + expect(logMessages).toMatchObject([ + { + topic: 'log_entries_test', + value: { + level: 'debug', + log_source: 'hog_function', + log_source_id: fnFetchNoFilters.id.toString(), + message: 'Executing function', + team_id: 2, + }, + }, + { + topic: 'log_entries_test', + value: { + level: 'debug', + log_source: 'hog_function', + log_source_id: fnFetchNoFilters.id.toString(), + message: expect.stringContaining( + "Suspending function due to async function call 'fetch'. Payload:" + ), + team_id: 2, + }, + }, + { + topic: 'log_entries_test', + value: { + level: 'debug', + log_source: 'hog_function', + log_source_id: fnFetchNoFilters.id.toString(), + message: 'Resuming function', + team_id: 2, + }, + }, + { + topic: 'log_entries_test', + value: { + level: 'info', + log_source: 'hog_function', + log_source_id: fnFetchNoFilters.id.toString(), + message: `Fetch response:, {"status":200,"body":{"success":true}}`, + team_id: 2, + }, + }, + { + topic: 'log_entries_test', + value: { + level: 'debug', + log_source: 'hog_function', + log_source_id: fnFetchNoFilters.id.toString(), + message: expect.stringContaining('Function completed in'), + team_id: 2, + }, + }, + ]) + }) + }) +}) diff --git a/plugin-server/tests/cdp/cdp-consumer.e2e.test.ts b/plugin-server/tests/cdp/cdp-function-processor.test.ts similarity index 97% rename from plugin-server/tests/cdp/cdp-consumer.e2e.test.ts rename to plugin-server/tests/cdp/cdp-function-processor.test.ts index 8d6581aef9ef0..5fb097b0a5c5e 100644 --- a/plugin-server/tests/cdp/cdp-consumer.e2e.test.ts +++ b/plugin-server/tests/cdp/cdp-function-processor.test.ts @@ -80,10 +80,7 @@ const convertToKafkaMessage = (message: any): any => { } } -/** - * NOTE: This isn't fully e2e... 
We still mock kafka but we trigger one queue from the other in a loop - */ -describe('CDP Consumers E2E', () => { +describe('CDP Function Processor', () => { let processedEventsConsumer: CdpProcessedEventsConsumer let functionProcessor: CdpFunctionCallbackConsumer let hub: Hub @@ -121,7 +118,7 @@ describe('CDP Consumers E2E', () => { jest.useRealTimers() }) - describe('e2e fetch function', () => { + describe('full fetch function', () => { /** * Tests here are somewhat expensive so should mostly simulate happy paths and the more e2e scenarios */ diff --git a/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts b/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts index b0a1c09f15d6f..11806c8595a10 100644 --- a/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts +++ b/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts @@ -5,12 +5,7 @@ import { Hub, Team } from '../../src/types' import { closeHub, createHub } from '../../src/utils/db/hub' import { getFirstTeam, resetTestDatabase } from '../helpers/sql' import { HOG_EXAMPLES, HOG_FILTERS_EXAMPLES, HOG_INPUTS_EXAMPLES } from './examples' -import { - createHogExecutionGlobals, - createIncomingEvent, - createMessage, - insertHogFunction as _insertHogFunction, -} from './fixtures' +import { createHogExecutionGlobals, insertHogFunction as _insertHogFunction } from './fixtures' const mockConsumer = { on: jest.fn(), @@ -113,10 +108,6 @@ describe('CDP Processed Events Consumer', () => { }) describe('general event processing', () => { - beforeEach(() => { - hub.CDP_EVENT_PROCESSOR_EXECUTE_FIRST_STEP = false - }) - describe('common processing', () => { let fnFetchNoFilters: HogFunctionType let fnPrinterPageviewFilters: HogFunctionType @@ -170,23 +161,89 @@ describe('CDP Processed Events Consumer', () => { matchInvocation(fnPrinterPageviewFilters, globals), ]) - expect(mockProducer.produce).toHaveBeenCalledTimes(2) - + expect(mockProducer.produce).toHaveBeenCalledTimes(11) expect(decodeAllKafkaMessages()).toMatchObject([ { - key: expect.any(String), - topic: 'cdp_function_callbacks_test', + topic: 'log_entries_test', value: { - state: expect.any(String), + message: 'Executing function', + log_source_id: fnFetchNoFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: "Suspending function due to async function call 'fetch'. 
Payload: 1902 bytes", + log_source_id: fnFetchNoFilters.id, + }, + }, + { + topic: 'clickhouse_app_metrics2_test', + value: { + app_source: 'hog_function', + team_id: 2, + app_source_id: fnPrinterPageviewFilters.id, + metric_kind: 'success', + metric_name: 'succeeded', + count: 1, + }, + }, + { + topic: 'log_entries_test', + value: { + message: 'Executing function', + log_source_id: fnPrinterPageviewFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: 'test', + log_source_id: fnPrinterPageviewFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: '{"nested":{"foo":"***REDACTED***","bool":false,"null":null}}', + log_source_id: fnPrinterPageviewFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: '{"foo":"***REDACTED***","bool":false,"null":null}', + log_source_id: fnPrinterPageviewFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: 'substring: ***REDACTED***', + log_source_id: fnPrinterPageviewFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: + '{"input_1":"test","secret_input_2":{"foo":"***REDACTED***","bool":false,"null":null},"secret_input_3":"***REDACTED***"}', + log_source_id: fnPrinterPageviewFilters.id, + }, + }, + { + topic: 'log_entries_test', + value: { + message: expect.stringContaining('Function completed'), + log_source_id: fnPrinterPageviewFilters.id, }, - waitForAck: true, }, { - key: expect.any(String), topic: 'cdp_function_callbacks_test', value: { state: expect.any(String), }, + key: expect.stringContaining(fnFetchNoFilters.id.toString()), waitForAck: true, }, ]) @@ -199,7 +256,7 @@ describe('CDP Processed Events Consumer', () => { expect(invocations).toHaveLength(1) expect(invocations).toMatchObject([matchInvocation(fnFetchNoFilters, globals)]) - expect(mockProducer.produce).toHaveBeenCalledTimes(2) + expect(mockProducer.produce).toHaveBeenCalledTimes(4) expect(decodeAllKafkaMessages()).toMatchObject([ { @@ -215,6 +272,12 @@ describe('CDP Processed Events Consumer', () => { timestamp: expect.any(String), }, }, + { + topic: 'log_entries_test', + }, + { + topic: 'log_entries_test', + }, { topic: 'cdp_function_callbacks_test', }, @@ -259,97 +322,5 @@ describe('CDP Processed Events Consumer', () => { ]) }) }) - - describe('kafka parsing', () => { - it('can parse incoming messages correctly', async () => { - await insertHogFunction({ - ...HOG_EXAMPLES.simple_fetch, - ...HOG_INPUTS_EXAMPLES.simple_fetch, - ...HOG_FILTERS_EXAMPLES.no_filters, - }) - // Create a message that should be processed by this function - // Run the function and check that it was executed - await processor._handleKafkaBatch([ - createMessage( - createIncomingEvent(team.id, { - uuid: 'b3a1fe86-b10c-43cc-acaf-d208977608d0', - event: '$pageview', - properties: JSON.stringify({ - $lib_version: '1.0.0', - }), - }) - ), - ]) - - // Generall check that the message seemed to get processed - expect(decodeAllKafkaMessages()).toMatchObject([ - { - key: expect.any(String), - topic: 'cdp_function_callbacks_test', - value: { - state: expect.any(String), - }, - waitForAck: true, - }, - ]) - }) - }) - - describe('no delayed execution', () => { - beforeEach(() => { - hub.CDP_EVENT_PROCESSOR_EXECUTE_FIRST_STEP = true - }) - - it('should invoke the initial function before enqueuing', async () => { - await insertHogFunction({ - ...HOG_EXAMPLES.simple_fetch, - ...HOG_INPUTS_EXAMPLES.simple_fetch, - ...HOG_FILTERS_EXAMPLES.no_filters, - }) - // Create a message that should be processed by this function - // 
Run the function and check that it was executed
-            await processor._handleKafkaBatch([
-                createMessage(
-                    createIncomingEvent(team.id, {
-                        uuid: 'b3a1fe86-b10c-43cc-acaf-d208977608d0',
-                        event: '$pageview',
-                        properties: JSON.stringify({
-                            $lib_version: '1.0.0',
-                        }),
-                    })
-                ),
-            ])
-
-            // General check that the message seemed to get processed
-            expect(decodeAllKafkaMessages()).toMatchObject([
-                {
-                    key: expect.any(String),
-                    topic: 'log_entries_test',
-                    value: {
-                        message: 'Executing function',
-                    },
-                    waitForAck: true,
-                },
-                {
-                    key: expect.any(String),
-                    topic: 'log_entries_test',
-                    value: {
-                        message: expect.stringContaining(
-                            "Suspending function due to async function call 'fetch'. Payload"
-                        ),
-                    },
-                    waitForAck: true,
-                },
-                {
-                    key: expect.any(String),
-                    topic: 'cdp_function_callbacks_test',
-                    value: {
-                        state: expect.any(String),
-                    },
-                    waitForAck: true,
-                },
-            ])
-        })
-    })
    })
})
diff --git a/plugin-server/tests/cdp/examples.ts b/plugin-server/tests/cdp/examples.ts
index c662e31aae187..a3f696c32187c 100644
--- a/plugin-server/tests/cdp/examples.ts
+++ b/plugin-server/tests/cdp/examples.ts
@@ -417,6 +417,167 @@ export const HOG_FILTERS_EXAMPLES: Record<string, Pick<HogFunctionType, 'filters'>> = {
diff --git a/plugin-server/tests/cdp/helpers/kafka-observer.ts b/plugin-server/tests/cdp/helpers/kafka-observer.ts
new file mode 100644
index 0000000000000..462c06fc1e137
--- /dev/null
+++ b/plugin-server/tests/cdp/helpers/kafka-observer.ts
@@ -0,0 +1,72 @@
+import { KafkaConsumer, Message } from 'node-rdkafka'
+
+import { createAdminClient, ensureTopicExists } from '../../../src/kafka/admin'
+import { createRdConnectionConfigFromEnvVars } from '../../../src/kafka/config'
+import { createKafkaConsumer } from '../../../src/kafka/consumer'
+import { Hub } from '../../../src/types'
+import { delay, UUIDT } from '../../../src/utils/utils'
+
+export type TestKafkaObserver = {
+    messages: {
+        topic: string
+        value: any
+    }[]
+    consumer: KafkaConsumer
+    stop: () => Promise<void>
+    expectMessageCount: (count: number) => Promise<void>
+}
+
+export const createKafkaObserver = async (hub: Hub, topics: string[]): Promise<TestKafkaObserver> => {
+    const consumer = await createKafkaConsumer({
+        ...createRdConnectionConfigFromEnvVars(hub),
+        'group.id': `test-group-${new UUIDT().toString()}`,
+    })
+
+    const adminClient = createAdminClient(createRdConnectionConfigFromEnvVars(hub))
+    await Promise.all(topics.map((topic) => ensureTopicExists(adminClient, topic, 1000)))
+    adminClient.disconnect()
+
+    await new Promise<void>((res, rej) => consumer.connect({}, (err) => (err ? rej(err) : res())))
+    consumer.subscribe(topics)
+    const messages: {
+        topic: string
+        value: any
+    }[] = []
+
+    const poll = async () => {
+        await delay(50)
+        if (!consumer.isConnected()) {
+            return
+        }
+        const newMessages = await new Promise<Message[]>((res, rej) =>
+            consumer.consume(10, (err, messages) => (err ? rej(err) : res(messages)))
+        )
+
+        messages.push(
+            ...newMessages.map((message) => ({
+                topic: message.topic,
+                value: JSON.parse(message.value?.toString() ?? 
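// Editor's note: a usage sketch for this observer, mirroring the cdp-e2e test earlier in
// this diff (the topics and message count are taken from that test, nothing else assumed):
/*
const observer = await createKafkaObserver(hub, [KAFKA_APP_METRICS_2, KAFKA_LOG_ENTRIES])
// ... trigger the behaviour under test ...
await observer.expectMessageCount(6) // polls, throws if the count is not reached within 5s
const logs = observer.messages.filter((m) => m.topic === KAFKA_LOG_ENTRIES)
await observer.stop()
*/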
''), + })) + ) + poll() + } + + poll() + + return { + messages, + consumer, + stop: () => new Promise((res) => consumer.disconnect(res)), + expectMessageCount: async (count: number): Promise => { + const timeout = 5000 + const now = Date.now() + while (messages.length < count && Date.now() - now < timeout) { + await delay(100) + } + + if (messages.length < count) { + throw new Error(`Expected ${count} messages, got ${messages.length}`) + } + }, + } +} diff --git a/plugin-server/tests/cdp/hog-executor.test.ts b/plugin-server/tests/cdp/hog-executor.test.ts index dc6350e0bb3d2..03addf077d964 100644 --- a/plugin-server/tests/cdp/hog-executor.test.ts +++ b/plugin-server/tests/cdp/hog-executor.test.ts @@ -2,7 +2,7 @@ import { DateTime } from 'luxon' import { HogExecutor } from '../../src/cdp/hog-executor' import { HogFunctionManager } from '../../src/cdp/hog-function-manager' -import { HogFunctionAsyncFunctionResponse, HogFunctionType } from '../../src/cdp/types' +import { HogFunctionInvocation, HogFunctionType } from '../../src/cdp/types' import { HOG_EXAMPLES, HOG_FILTERS_EXAMPLES, HOG_INPUTS_EXAMPLES } from './examples' import { createHogExecutionGlobals, @@ -11,8 +11,9 @@ import { insertHogFunction as _insertHogFunction, } from './fixtures' -const createAsyncFunctionResponse = (response?: Record): HogFunctionAsyncFunctionResponse => { - return { +const setupFetchResponse = (invocation: HogFunctionInvocation, options?: { status?: number; body?: string }): void => { + invocation.queue = 'hog' + invocation.queueParameters = { timings: [ { kind: 'async_function', @@ -20,11 +21,10 @@ const createAsyncFunctionResponse = (response?: Record): HogFunctio }, ], response: { - status: 200, - body: 'success', - ...response, + status: options?.status ?? 200, }, } + invocation.queueBlob = Buffer.from(options?.body ?? 
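// Editor's note: queueBlob carries the response body as raw bytes; the consuming side
// decodes it with the queueBlobToString() helper added earlier in this diff. Round trip:
/*
const blob = Buffer.from('{"foo":"bar"}')
queueBlobToString(blob) // -> '{"foo":"bar"}' (utf-8); returns undefined when blob is absent
*/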
'success') } describe('Hog Executor', () => { @@ -69,6 +69,7 @@ describe('Hog Executor', () => { hogFunction: invocation.hogFunction, queue: 'fetch', queueParameters: expect.any(Object), + queueBlob: expect.any(Buffer), timings: [ { kind: 'hog', @@ -133,7 +134,8 @@ describe('Hog Executor', () => { }, }) - expect(JSON.parse(result.invocation.queueParameters!.body)).toEqual({ + const body = JSON.parse(Buffer.from(result.invocation.queueBlob!).toString()) + expect(body).toEqual({ event: { uuid: 'uuid', name: 'test', @@ -163,8 +165,7 @@ describe('Hog Executor', () => { expect(result.invocation.vmState).toBeDefined() // Simulate what the callback does - result.invocation.queue = 'hog' - result.invocation.queueParameters = createAsyncFunctionResponse() + setupFetchResponse(result.invocation) const secondResult = executor.execute(result.invocation) logs.push(...secondResult.logs) @@ -185,10 +186,7 @@ describe('Hog Executor', () => { it('parses the responses body if a string', () => { const result = executor.execute(createInvocation(hogFunction)) const logs = result.logs.splice(0, 100) - result.invocation.queue = 'hog' - result.invocation.queueParameters = createAsyncFunctionResponse({ - body: JSON.stringify({ foo: 'bar' }), - }) + setupFetchResponse(result.invocation, { body: JSON.stringify({ foo: 'bar' }) }) const secondResult = executor.execute(result.invocation) logs.push(...secondResult.logs) @@ -233,6 +231,156 @@ describe('Hog Executor', () => { expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) }) + + it('can use elements_chain_texts', () => { + const fn = createHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.elements_text_filter, + }) + + mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) + const elementsChain = (buttonText: string) => + `span.LemonButton__content:attr__class="LemonButton__content"nth-child="2"nth-of-type="2"text="${buttonText}";span.LemonButton__chrome:attr__class="LemonButton__chrome"nth-child="1"nth-of-type="1";button.LemonButton.LemonButton--has-icon.LemonButton--secondary.LemonButton--status-default:attr__class="LemonButton LemonButton--secondary LemonButton--status-default LemonButton--has-icon"attr__type="button"nth-child="1"nth-of-type="1"text="${buttonText}";div.flex.gap-4.items-center:attr__class="flex gap-4 items-center"nth-child="1"nth-of-type="1";div.flex.flex-wrap.gap-4.justify-between:attr__class="flex gap-4 justify-between flex-wrap"nth-child="3"nth-of-type="3";div.flex.flex-1.flex-col.gap-4.h-full.relative.w-full:attr__class="relative w-full flex flex-col gap-4 flex-1 h-full"nth-child="1"nth-of-type="1";div.LemonTabs__content:attr__class="LemonTabs__content"nth-child="2"nth-of-type="1";div.LemonTabs.LemonTabs--medium:attr__class="LemonTabs LemonTabs--medium"attr__style="--lemon-tabs-slider-width: 48px; --lemon-tabs-slider-offset: 0px;"nth-child="1"nth-of-type="1";div.Navigation3000__scene:attr__class="Navigation3000__scene"nth-child="2"nth-of-type="2";main:nth-child="2"nth-of-type="1";div.Navigation3000:attr__class="Navigation3000"nth-child="1"nth-of-type="1";div:attr__id="root"attr_id="root"nth-child="3"nth-of-type="1";body.overflow-hidden:attr__class="overflow-hidden"attr__theme="light"nth-child="2"nth-of-type="1"` + + const hogGlobals1 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + 
properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('Not our text'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldntMatch = executor.findMatchingFunctions(hogGlobals1) + expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) + expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) + + const hogGlobals2 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('Reload'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldMatch = executor.findMatchingFunctions(hogGlobals2) + expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) + expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) + }) + + it('can use elements_chain_href', () => { + const fn = createHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.elements_href_filter, + }) + + mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) + const elementsChain = (link: string) => + `span.LemonButton__content:attr__class="LemonButton__content"attr__href="${link}"href="${link}"nth-child="2"nth-of-type="2"text="Activity";span.LemonButton__chrome:attr__class="LemonButton__chrome"nth-child="1"nth-of-type="1";a.LemonButton.LemonButton--full-width.LemonButton--has-icon.LemonButton--secondary.LemonButton--status-alt.Link.NavbarButton:attr__class="Link LemonButton LemonButton--secondary LemonButton--status-alt LemonButton--full-width LemonButton--has-icon NavbarButton"attr__data-attr="menu-item-activity"attr__href="${link}"href="${link}"nth-child="1"nth-of-type="1"text="Activity";li.w-full:attr__class="w-full"nth-child="6"nth-of-type="6";ul:nth-child="1"nth-of-type="1";div.Navbar3000__top.ScrollableShadows__inner:attr__class="ScrollableShadows__inner Navbar3000__top"nth-child="1"nth-of-type="1";div.ScrollableShadows.ScrollableShadows--vertical:attr__class="ScrollableShadows ScrollableShadows--vertical"nth-child="1"nth-of-type="1";div.Navbar3000__content:attr__class="Navbar3000__content"nth-child="1"nth-of-type="1";nav.Navbar3000:attr__class="Navbar3000"nth-child="1"nth-of-type="1";div.Navigation3000:attr__class="Navigation3000"nth-child="1"nth-of-type="1";div:attr__id="root"attr_id="root"nth-child="3"nth-of-type="1";body.overflow-hidden:attr__class="overflow-hidden"attr__theme="light"nth-child="2"nth-of-type="1"` + + const hogGlobals1 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('/project/1/not-a-link'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldntMatch = executor.findMatchingFunctions(hogGlobals1) + expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) + expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) + + const hogGlobals2 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('/project/1/activity/explore'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldMatch = executor.findMatchingFunctions(hogGlobals2) + 
expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) + expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) + }) + + it('can use elements_chain_tags and _ids', () => { + const fn = createHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.elements_tag_and_id_filter, + }) + + mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) + const elementsChain = (id: string) => + `a.Link.font-semibold.text-text-3000.text-xl:attr__class="Link font-semibold text-xl text-text-3000"attr__href="/project/1/dashboard/1"attr__id="${id}"attr_id="${id}"href="/project/1/dashboard/1"nth-child="1"nth-of-type="1"text="My App Dashboard";div.ProjectHomepage__dashboardheader__title:attr__class="ProjectHomepage__dashboardheader__title"nth-child="1"nth-of-type="1";div.ProjectHomepage__dashboardheader:attr__class="ProjectHomepage__dashboardheader"nth-child="2"nth-of-type="2";div.ProjectHomepage:attr__class="ProjectHomepage"nth-child="1"nth-of-type="1";div.Navigation3000__scene:attr__class="Navigation3000__scene"nth-child="2"nth-of-type="2";main:nth-child="2"nth-of-type="1";div.Navigation3000:attr__class="Navigation3000"nth-child="1"nth-of-type="1";div:attr__id="root"attr_id="root"nth-child="3"nth-of-type="1";body.overflow-hidden:attr__class="overflow-hidden"attr__theme="light"nth-child="2"nth-of-type="1"` + + const hogGlobals1 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('notfound'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldntMatch = executor.findMatchingFunctions(hogGlobals1) + expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) + expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) + + const hogGlobals2 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('homelink'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldMatch = executor.findMatchingFunctions(hogGlobals2) + expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) + expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) + }) }) describe('async functions', () => { @@ -249,18 +397,16 @@ describe('Hog Executor', () => { // Start the function const result1 = executor.execute(invocation) // Run the response one time simulating a successful fetch - result1.invocation.queue = 'hog' - result1.invocation.queueParameters = createAsyncFunctionResponse() + setupFetchResponse(result1.invocation) const result2 = executor.execute(result1.invocation) expect(result2.finished).toBe(false) expect(result2.error).toBe(undefined) expect(result2.invocation.queue).toBe('fetch') // This time we should see an error for hitting the loop limit - result2.invocation.queue = 'hog' - result2.invocation.queueParameters = createAsyncFunctionResponse() + setupFetchResponse(result2.invocation) const result3 = executor.execute(result1.invocation) - expect(result3.finished).toBe(false) + expect(result3.finished).toBe(true) expect(result3.error).toEqual('Exceeded maximum number of async steps: 2') expect(result3.logs.map((log) => log.message)).toEqual([ 'Resuming function', diff --git 
a/plugin-server/tests/cdp/hog-function-manager.test.ts b/plugin-server/tests/cdp/hog-function-manager.test.ts
index 1624999c93058..3f34fcb4fe378 100644
--- a/plugin-server/tests/cdp/hog-function-manager.test.ts
+++ b/plugin-server/tests/cdp/hog-function-manager.test.ts
@@ -81,6 +81,7 @@ describe('HogFunctionManager', () => {
     })

     afterEach(async () => {
+        await manager.stop()
         await closeHub(hub)
     })

diff --git a/plugin-server/tests/helpers/expectations.ts b/plugin-server/tests/helpers/expectations.ts
new file mode 100644
index 0000000000000..6a4dcf9b3cc53
--- /dev/null
+++ b/plugin-server/tests/helpers/expectations.ts
@@ -0,0 +1,17 @@
+export const waitForExpect = async <T>(fn: () => T | Promise<T>, timeout = 10_000, interval = 1_000): Promise<T> => {
+    // Allows for running expectations that are expected to pass eventually.
+    // This is useful e.g. for waiting for events to have been ingested into
+    // the database.
+
+    const start = Date.now()
+    while (true) {
+        try {
+            return await fn()
+        } catch (error) {
+            if (Date.now() - start > timeout) {
+                throw error
+            }
+            await new Promise((resolve) => setTimeout(resolve, interval))
+        }
+    }
+}
diff --git a/plugin-server/tests/main/process-event.test.ts b/plugin-server/tests/main/process-event.test.ts
index 9c052ee5acebe..ff9fbcb9d9823 100644
--- a/plugin-server/tests/main/process-event.test.ts
+++ b/plugin-server/tests/main/process-event.test.ts
@@ -11,16 +11,7 @@ import * as IORedis from 'ioredis'
 import { DateTime } from 'luxon'

 import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../src/config/kafka-topics'
-import {
-    ClickHouseEvent,
-    Database,
-    Hub,
-    LogLevel,
-    Person,
-    PluginsServerConfig,
-    PropertyDefinitionTypeEnum,
-    Team,
-} from '../../src/types'
+import { ClickHouseEvent, Database, Hub, LogLevel, Person, PluginsServerConfig, Team } from '../../src/types'
 import { closeHub, createHub } from '../../src/utils/db/hub'
 import { PostgresUse } from '../../src/utils/db/postgres'
 import { personInitialAndUTMProperties } from '../../src/utils/db/utils'
@@ -268,9 +259,6 @@ test('capture new person', async () => {
     )
     team = await getFirstTeam(hub)

-    expect(await hub.db.fetchEventDefinitions()).toEqual([])
-    expect(await hub.db.fetchPropertyDefinitions()).toEqual([])
-
     const properties = personInitialAndUTMProperties({
         distinct_id: 2,
         token: team.api_token,
@@ -525,313 +513,6 @@ test('capture new person', async () => {
     expect(JSON.parse(chPeople3[0].properties)).toEqual(expectedProps)

     team = await getFirstTeam(hub)
-
-    expect(await hub.db.fetchEventDefinitions()).toEqual([
-        {
-            id: expect.any(String),
-            name: '$autocapture',
-            query_usage_30_day: null,
-            team_id: 2,
-            volume_30_day: null,
-            created_at: expect.any(String),
-            last_seen_at: expect.any(String),
-        },
-    ])
-    const received = await hub.db.fetchPropertyDefinitions()
-    const expected = [
-        {
-            id: expect.any(String),
-            is_numerical: true,
-            name: 'distinct_id',
-            property_type: 'Numeric',
-            property_type_format: null,
-            query_usage_30_day: null,
-            team_id: 2,
-            type: 1,
-            group_type_index: null,
-            volume_30_day: null,
-        },
-        {
-            id: expect.any(String),
-            is_numerical: false,
-            name: 'token',
-            property_type: 'String',
-            property_type_format: null,
-            query_usage_30_day: null,
-            team_id: 2,
-            type: 1,
-            group_type_index: null,
-            volume_30_day: null,
-        },
-        {
-            id: expect.any(String),
-            is_numerical: false,
-            name: '$browser',
-            property_type: 'String',
-            property_type_format: null,
-            query_usage_30_day: null,
-            team_id: 2,
-            type: 1,
-            group_type_index: null,
-            volume_30_day: null,
-        },
-        {
-            id: 
expect.any(String), - is_numerical: false, - name: '$current_url', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$os', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$browser_version', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$referring_domain', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$referrer', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'utm_medium', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'gclid', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'msclkid', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$ip', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 1, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'utm_medium', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'gclid', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'msclkid', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_browser', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_current_url', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_os', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - 
volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_browser_version', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_referring_domain', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_referrer', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_utm_medium', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_gclid', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: '$initial_msclkid', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - type: 2, - group_type_index: null, - volume_30_day: null, - }, - ] - for (const element of expected) { - // Looping in an array to make it easier to debug - expect(received).toEqual(expect.arrayContaining([element])) - } }) test('capture bad team', async () => { @@ -1916,96 +1597,6 @@ describe('when handling $create_alias', () => { }) }) -test('team event_properties', async () => { - expect(await hub.db.fetchEventDefinitions()).toEqual([]) - expect(await hub.db.fetchEventProperties()).toEqual([]) - expect(await hub.db.fetchPropertyDefinitions()).toEqual([]) - - await processEvent( - 'xxx', - '127.0.0.1', - '', - { event: 'purchase', properties: { price: 299.99, name: 'AirPods Pro' } } as any as PluginEvent, - team.id, - now, - new UUIDT().toString() - ) - - team = await getFirstTeam(hub) - - expect(await hub.db.fetchEventDefinitions()).toEqual([ - { - id: expect.any(String), - name: 'purchase', - query_usage_30_day: null, - team_id: 2, - volume_30_day: null, - created_at: expect.any(String), - last_seen_at: expect.any(String), - }, - ]) - expect(await hub.db.fetchPropertyDefinitions()).toEqual([ - { - id: expect.any(String), - is_numerical: false, - name: '$ip', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Event, - group_type_index: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'name', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Event, - group_type_index: null, - }, - { - id: expect.any(String), - is_numerical: true, - name: 'price', - property_type: 'Numeric', - property_type_format: null, - query_usage_30_day: null, - team_id: 2, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Event, - group_type_index: null, - }, - ]) - - // flushed every minute normally, triggering flush now, it's tested elsewhere - expect(await hub.db.fetchEventProperties()).toEqual([ - { - id: expect.any(Number), - event: 'purchase', - 
property: '$ip', - team_id: 2, - }, - { - id: expect.any(Number), - event: 'purchase', - property: 'name', - team_id: 2, - }, - { - id: expect.any(Number), - event: 'purchase', - property: 'price', - team_id: 2, - }, - ]) -}) - test('event name object json', async () => { await processEvent( 'xxx', diff --git a/plugin-server/tests/worker/ingestion/property-definitions-auto-discovery.test.ts b/plugin-server/tests/worker/ingestion/property-definitions-auto-discovery.test.ts deleted file mode 100644 index 88ade64430390..0000000000000 --- a/plugin-server/tests/worker/ingestion/property-definitions-auto-discovery.test.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { PropertyType } from '../../../src/types' -import { detectPropertyDefinitionTypes } from '../../../src/worker/ingestion/property-definitions-auto-discovery' - -describe('property definitions auto discovery', () => { - describe('can detect numbers', () => { - it('can detect "10"', () => { - expect(detectPropertyDefinitionTypes('10', 'anything')).toEqual(PropertyType.String) - }) - - it('can detect 10', () => { - expect(detectPropertyDefinitionTypes(10, 'anything')).toEqual(PropertyType.Numeric) - }) - - it('can detect ""', () => { - expect(detectPropertyDefinitionTypes('', 'anything')).toEqual(PropertyType.String) - }) - - it('can detect null', () => { - expect(detectPropertyDefinitionTypes(null, 'anything')).toEqual(null) - }) - - it('can detect decimals', () => { - expect(detectPropertyDefinitionTypes(1.23, 'anything')).toEqual(PropertyType.Numeric) - }) - - it('can detect version numbers as non numeric', () => { - expect(detectPropertyDefinitionTypes('1.2.3', 'anything')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('9.7.0', '$app_version')).toEqual(PropertyType.String) - }) - }) - - describe('can detect feature flag properties', () => { - it('detects regular feature flag properties as string', () => { - expect(detectPropertyDefinitionTypes('10', '$feature/my-feature')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('true', '$feature/my-feature')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('false', '$feature/my-feature')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes(12, '$feature/my-feature')).toEqual(PropertyType.String) - }) - - it('doesnt detect $feature_interaction properties as string', () => { - expect(detectPropertyDefinitionTypes('true', '$feature_interaction/my-feature')).toEqual( - PropertyType.Boolean - ) - expect(detectPropertyDefinitionTypes('true', '$$feature/my-feature')).toEqual(PropertyType.Boolean) - expect(detectPropertyDefinitionTypes('true', ' $feature/my-feature')).toEqual(PropertyType.Boolean) - expect(detectPropertyDefinitionTypes('true', '$feat/$feature/my-feature')).toEqual(PropertyType.Boolean) - expect(detectPropertyDefinitionTypes('true', '$features/my-feature')).toEqual(PropertyType.Boolean) - expect(detectPropertyDefinitionTypes('["a","b","c"]', '$active_feature_flags')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes(12, 'feature_flag')).toEqual(PropertyType.Numeric) - }) - }) - - describe('can detect $feature_flag_response properties', () => { - it('detects regular feature flag response properties as string', () => { - expect(detectPropertyDefinitionTypes('10', '$feature_flag_response')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('true', '$feature_flag_response')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('false', 
'$feature_flag_response')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes(12, '$feature_flag_response')).toEqual(PropertyType.String) - }) - }) - - describe('can detect $survey_response (the first question in a given survey) properties and cast them to strings', () => { - it('detects regular survey response properties as string', () => { - expect(detectPropertyDefinitionTypes('10', '$survey_response')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('true', '$survey_response')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('false', '$survey_response')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes(12, '$survey_response')).toEqual(PropertyType.String) - }) - - it('detects $survey_response_* (any subsequent question in a given survey) as a string', () => { - expect(detectPropertyDefinitionTypes('10', '$survey_response_2')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('true', '$survey_response_2')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes('false', '$survey_response_2')).toEqual(PropertyType.String) - expect(detectPropertyDefinitionTypes(12, '$survey_response_2')).toEqual(PropertyType.String) - }) - }) -}) diff --git a/plugin-server/tests/worker/ingestion/property-definitions-cache.test.ts b/plugin-server/tests/worker/ingestion/property-definitions-cache.test.ts deleted file mode 100644 index e36c8f0a755bd..0000000000000 --- a/plugin-server/tests/worker/ingestion/property-definitions-cache.test.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { Hub, PropertyDefinitionTypeEnum } from '../../../src/types' -import { closeHub, createHub } from '../../../src/utils/db/hub' -import { PostgresUse } from '../../../src/utils/db/postgres' -import { UUIDT } from '../../../src/utils/utils' -import { PropertyDefinitionsCache } from '../../../src/worker/ingestion/property-definitions-cache' -import { resetTestDatabase } from '../../helpers/sql' - -jest.mock('../../../src/utils/status') -jest.mock('../../../src/utils/posthog', () => ({ - posthog: { - identify: jest.fn(), - capture: jest.fn(), - }, -})) - -describe('PropertyDefinitionsManager()', () => { - let hub: Hub - let cache: PropertyDefinitionsCache - - beforeEach(async () => { - hub = await createHub() - await resetTestDatabase() - - cache = new PropertyDefinitionsCache(hub) - }) - - afterEach(async () => { - await closeHub(hub) - }) - - describe('with pre-existing data', () => { - beforeEach(async () => { - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_propertydefinition (id, name, property_type, type, is_numerical, volume_30_day, query_usage_30_day, team_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, - [ - new UUIDT().toString(), - 'property_name', - 'String', - PropertyDefinitionTypeEnum.Event, - false, - null, - null, - 2, - ], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_propertydefinition (id, name, property_type, type, is_numerical, volume_30_day, query_usage_30_day, team_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, - [ - new UUIDT().toString(), - 'numeric_prop', - 'String', - PropertyDefinitionTypeEnum.Event, - true, - null, - null, - 2, - ], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_propertydefinition (id, name, property_type, type, is_numerical, volume_30_day, query_usage_30_day, team_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, - [ - new UUIDT().toString(), - 'person_prop', 
- 'String', - PropertyDefinitionTypeEnum.Person, - false, - null, - null, - 2, - ], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_propertydefinition (id, name, property_type, type, is_numerical, volume_30_day, query_usage_30_day, team_id, group_type_index) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, - [ - new UUIDT().toString(), - 'group_prop', - 'String', - PropertyDefinitionTypeEnum.Group, - false, - null, - null, - 2, - 0, - ], - 'testTag' - ) - }) - - it('initializes cleanly', async () => { - await cache.initialize(2, hub.db) - - expect(cache.propertyDefinitionsCache.get(2)!.keys()).toEqual( - expect.arrayContaining(['30group_prop', '2person_prop', '1numeric_prop', '1property_name']) - ) - }) - - it('reports correct shouldUpdate', async () => { - await cache.initialize(2, hub.db) - - expect(cache.shouldUpdate(2, 'property_name', PropertyDefinitionTypeEnum.Event, null)).toEqual(false) - expect(cache.shouldUpdate(2, 'numeric_prop', PropertyDefinitionTypeEnum.Event, null)).toEqual(false) - expect(cache.shouldUpdate(2, 'person_prop', PropertyDefinitionTypeEnum.Person, null)).toEqual(false) - expect(cache.shouldUpdate(2, 'group_prop', PropertyDefinitionTypeEnum.Group, 0)).toEqual(false) - - expect(cache.shouldUpdate(2, 'new_prop', PropertyDefinitionTypeEnum.Event, null)).toEqual(true) - expect(cache.shouldUpdate(2, 'new_person_prop', PropertyDefinitionTypeEnum.Person, null)).toEqual(true) - expect(cache.shouldUpdate(2, 'group_prop', PropertyDefinitionTypeEnum.Group, 1)).toEqual(true) - }) - }) -}) diff --git a/plugin-server/tests/worker/ingestion/property-definitions-manager.test.ts b/plugin-server/tests/worker/ingestion/property-definitions-manager.test.ts deleted file mode 100644 index 8ec4eaad75859..0000000000000 --- a/plugin-server/tests/worker/ingestion/property-definitions-manager.test.ts +++ /dev/null @@ -1,818 +0,0 @@ -import { DateTime, Settings } from 'luxon' - -import { DateTimePropertyTypeFormat, Hub, PropertyDefinitionTypeEnum, PropertyType } from '../../../src/types' -import { closeHub, createHub } from '../../../src/utils/db/hub' -import { PostgresUse } from '../../../src/utils/db/postgres' -import { posthog } from '../../../src/utils/posthog' -import { UUIDT } from '../../../src/utils/utils' -import { GroupTypeManager } from '../../../src/worker/ingestion/group-type-manager' -import { dateTimePropertyTypeFormatPatterns } from '../../../src/worker/ingestion/property-definitions-auto-discovery' -import { NULL_AFTER_PROPERTY_TYPE_DETECTION } from '../../../src/worker/ingestion/property-definitions-cache' -import { PropertyDefinitionsManager } from '../../../src/worker/ingestion/property-definitions-manager' -import { createOrganization, createTeam } from '../../helpers/sql' - -jest.mock('../../../src/utils/status') -jest.mock('../../../src/utils/posthog', () => ({ - posthog: { - identify: jest.fn(), - capture: jest.fn(), - }, -})) - -describe('PropertyDefinitionsManager()', () => { - let hub: Hub - let manager: PropertyDefinitionsManager - let teamId: number - let organizationId: string - let groupTypeManager: GroupTypeManager - - beforeEach(async () => { - hub = await createHub() - organizationId = await createOrganization(hub.db.postgres) - teamId = await createTeam(hub.db.postgres, organizationId) - groupTypeManager = new GroupTypeManager(hub.postgres, hub.teamManager, hub.SITE_URL) - manager = new PropertyDefinitionsManager(hub.teamManager, groupTypeManager, hub.db, hub) - - Settings.defaultZoneName = 'utc' - }) - - 
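(Aside on the new test helper introduced earlier in this diff: a minimal usage sketch of waitForExpect from plugin-server/tests/helpers/expectations.ts, assuming a Jest test with a hub in scope. The ingestEvent call and the fetchPersons assertion are hypothetical stand-ins for whatever eventually-consistent state a real test polls for.)

import { waitForExpect } from '../helpers/expectations'

test('person is eventually ingested', async () => {
    // Hypothetical trigger: produce an event that is ingested asynchronously.
    await ingestEvent(hub, { event: '$pageview', distinct_id: 'user-1' })

    // Re-runs the expectation every second for up to 10 seconds (the defaults),
    // rethrowing the last assertion error if it never passes in time.
    const persons = await waitForExpect(async () => {
        const rows = await hub.db.fetchPersons()
        expect(rows.length).toEqual(1)
        return rows
    })
    expect(persons[0].team_id).toBeDefined()
})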
afterEach(async () => { - await closeHub(hub) - }) - - describe('updateEventNamesAndProperties()', () => { - describe('base tests', () => { - beforeEach(async () => { - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id, created_at) VALUES ($1, $2, $3, $4, $5, NOW())`, - [new UUIDT().toString(), '$pageview', 3, 2, teamId], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_eventdefinition (id, name, team_id, created_at, last_seen_at) VALUES ($1, $2, $3, NOW(), $4)`, - [new UUIDT().toString(), 'another_test_event', teamId, '2014-03-23T23:23:23Z'], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_propertydefinition (id, name, type, is_numerical, volume_30_day, query_usage_30_day, team_id) VALUES ($1, $2, $3, $4, $5, $6, $7)`, - [ - new UUIDT().toString(), - 'property_name', - PropertyDefinitionTypeEnum.Event, - false, - null, - null, - teamId, - ], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_propertydefinition (id, name, type, is_numerical, volume_30_day, query_usage_30_day, team_id) VALUES ($1, $2, $3, $4, $5, $6, $7)`, - [ - new UUIDT().toString(), - 'numeric_prop', - PropertyDefinitionTypeEnum.Event, - true, - null, - null, - teamId, - ], - 'testTag' - ) - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_eventproperty (event, property, team_id) VALUES ($1, $2, $3)`, - ['new-event', 'numeric_prop', teamId], - 'testTag' - ) - }) - - it('updates event properties', async () => { - jest.spyOn(global.Date, 'now').mockImplementation(() => new Date('2020-02-27T11:00:36.000Z').getTime()) - - await manager.updateEventNamesAndProperties(teamId, 'new-event', { - property_name: 'efg', - number: 4, - numeric_prop: 5, - }) - - const eventDefinitions = await hub.db.fetchEventDefinitions(teamId) - - expect(eventDefinitions).toEqual([ - { - id: expect.any(String), - name: '$pageview', - query_usage_30_day: 2, - team_id: teamId, - volume_30_day: 3, - last_seen_at: null, - created_at: expect.any(String), - }, - { - id: expect.any(String), - name: 'another_test_event', - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - last_seen_at: '2014-03-23T23:23:23.000Z', // values are not updated directly - created_at: expect.any(String), - }, - { - id: expect.any(String), - name: 'new-event', - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - last_seen_at: '2020-02-27T11:00:36.000Z', // overridden Date.now() - created_at: expect.any(String), - }, - ]) - - for (const eventDef of eventDefinitions) { - if (eventDef.name === 'new-event') { - const parsedLastSeen = DateTime.fromISO(eventDef.last_seen_at) - expect(parsedLastSeen.diff(DateTime.now()).seconds).toBeCloseTo(0) - - const parsedCreatedAt = DateTime.fromISO(eventDef.created_at) - expect(parsedCreatedAt.diff(DateTime.now()).seconds).toBeCloseTo(0) - } - } - - expect(await hub.db.fetchEventProperties(teamId)).toEqual([ - { - id: expect.any(Number), - event: 'new-event', - property: 'number', - team_id: teamId, - }, - { - id: expect.any(Number), - event: 'new-event', - property: 'numeric_prop', - team_id: teamId, - }, - { - id: expect.any(Number), - event: 'new-event', - property: 'property_name', - team_id: teamId, - }, - ]) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - { - id: expect.any(String), - is_numerical: true, - name: 
'number', - property_type: 'Numeric', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Event, - group_type_index: null, - }, - { - id: expect.any(String), - is_numerical: true, - name: 'numeric_prop', - property_type: 'Numeric', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Event, - group_type_index: null, - }, - { - id: expect.any(String), - is_numerical: false, - name: 'property_name', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Event, - group_type_index: null, - }, - ]) - }) - - it('sets or updates eventLastSeenCache', async () => { - jest.spyOn(global.Date, 'now').mockImplementation(() => new Date('2015-04-04T04:04:04.000Z').getTime()) - - expect(manager.eventLastSeenCache.length).toEqual(0) - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', {}) - expect(manager.eventLastSeenCache.length).toEqual(1) - expect(manager.eventLastSeenCache.get(`[${teamId},"another_test_event"]`)).toEqual(20150404) - - // Start tracking queries - const postgresQuery = jest.spyOn(manager.db.postgres, 'query') - - // New event, 10 sec later (all caches should be hit) - jest.spyOn(global.Date, 'now').mockImplementation(() => new Date('2015-04-04T04:04:14.000Z').getTime()) - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', {}) - expect(postgresQuery).not.toHaveBeenCalled() - - // New event, 1 day later (all caches should be empty) - jest.spyOn(global.Date, 'now').mockImplementation(() => new Date('2015-04-05T04:04:14.000Z').getTime()) - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', {}) - expect(postgresQuery).toHaveBeenCalledWith( - PostgresUse.COMMON_WRITE, - 'UPDATE posthog_eventdefinition SET last_seen_at=$1 WHERE team_id=$2 AND name=$3', - [DateTime.now(), teamId, 'another_test_event'], - 'updateEventLastSeenAt' - ) - - // Re-ingest, should add no queries - postgresQuery.mockClear() - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', {}) - expect(postgresQuery).not.toHaveBeenCalled() - - expect(manager.eventLastSeenCache.length).toEqual(1) - expect(manager.eventLastSeenCache.get(`[${teamId},"another_test_event"]`)).toEqual(20150405) - }) - - it('does not capture event', async () => { - await manager.updateEventNamesAndProperties(teamId, 'new-event', { property_name: 'efg', number: 4 }) - - expect(posthog.capture).not.toHaveBeenCalled() - }) - - it('handles cache invalidation properly', async () => { - await manager.teamManager.fetchTeam(teamId) - await manager.cacheEventNamesAndProperties(teamId, '$foobar') - await hub.db.postgres.query( - PostgresUse.COMMON_WRITE, - `INSERT INTO posthog_eventdefinition (id, name, volume_30_day, query_usage_30_day, team_id) VALUES ($1, $2, NULL, NULL, $3) ON CONFLICT DO NOTHING`, - [new UUIDT().toString(), '$foobar', teamId], - 'insertEventDefinition' - ) - - jest.spyOn(manager.teamManager, 'fetchTeam') - jest.spyOn(hub.db.postgres, 'query') - - // Scenario: Different request comes in, team gets reloaded in the background with no updates - await manager.updateEventNamesAndProperties(teamId, '$foobar', {}) - expect(manager.teamManager.fetchTeam).toHaveBeenCalledTimes(1) - expect(hub.db.postgres.query).toHaveBeenCalledTimes(1) - - // Scenario: Next request but a real update - 
jest.mocked(manager.teamManager.fetchTeam).mockClear() - jest.mocked(hub.db.postgres.query).mockClear() - - await manager.updateEventNamesAndProperties(teamId, '$newevent', {}) - expect(manager.teamManager.fetchTeam).toHaveBeenCalledTimes(1) - // extra query for `cacheEventNamesAndProperties` that we did manually before - expect(hub.db.postgres.query).toHaveBeenCalledTimes(2) - }) - }) - - it('saves person property definitions', async () => { - await manager.updateEventNamesAndProperties(teamId, 'new-event', { - $set: { - foo: 'bar', - }, - $set_once: { - numeric: 123, - }, - }) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - { - id: expect.any(String), - is_numerical: false, - name: 'foo', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Person, - group_type_index: null, - }, - { - id: expect.any(String), - is_numerical: true, - name: 'numeric', - property_type: 'Numeric', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Person, - group_type_index: null, - }, - ]) - }) - - it('saves group property definitions', async () => { - await groupTypeManager.insertGroupType(teamId, 'project', 0) - await groupTypeManager.insertGroupType(teamId, 'organization', 1) - - await manager.updateEventNamesAndProperties(teamId, '$groupidentify', { - $group_type: 'organization', - $group_key: 'org::5', - $group_set: { - foo: 'bar', - numeric: 3, - }, - }) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - { - id: expect.any(String), - is_numerical: false, - name: 'foo', - property_type: 'String', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Group, - group_type_index: 1, - }, - { - id: expect.any(String), - is_numerical: true, - name: 'numeric', - property_type: 'Numeric', - property_type_format: null, - query_usage_30_day: null, - team_id: teamId, - volume_30_day: null, - type: PropertyDefinitionTypeEnum.Group, - group_type_index: 1, - }, - ]) - }) - - it('regression tests: handles group set properties being empty', async () => { - // See details of the regression - // [here](https://posthog.slack.com/archives/C0460J93NBU/p1676384802876269) - // - // We were essentially failing and throwing a Sentry error if the - // group properties was no an object. This test would throw before - // the fix. 
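(Aside: the regression comment above concerns $group_set arriving as null rather than an object. A minimal sketch of the kind of guard that implies — a hypothetical helper for illustration, not the actual fix shipped for this regression.)

// Only treat $group_set as a source of property definitions when it is a plain object.
const groupSetEntries = (groupSet: unknown): [string, unknown][] => {
    if (groupSet === null || typeof groupSet !== 'object' || Array.isArray(groupSet)) {
        return [] // null, arrays, and primitives contribute no property definitions
    }
    return Object.entries(groupSet as Record<string, unknown>)
}

// With this guard, `$group_set: null` (the regression input) yields an empty list
// instead of throwing when the entries are iterated.
for (const [name, value] of groupSetEntries(null)) {
    console.log(name, value) // never reached for null input
}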
- await groupTypeManager.insertGroupType(teamId, 'project', 0) - await groupTypeManager.insertGroupType(teamId, 'organization', 1) - - await manager.updateEventNamesAndProperties(teamId, '$groupidentify', { - $group_type: 'organization', - $group_key: 'org::5', - $group_set: null, - }) - }) - - it('regression tests: handles group type property being empty', async () => { - await groupTypeManager.insertGroupType(teamId, 'project', 0) - await groupTypeManager.insertGroupType(teamId, 'organization', 1) - - await manager.updateEventNamesAndProperties(teamId, '$groupidentify', { - $group_key: 'org::5', - $group_set: { - foo: 'bar', - numeric: 3, - }, - }) - }) - - it('regression tests: 400 characters fit in property definitions', async () => { - await groupTypeManager.insertGroupType(teamId, 'project', 0) - await groupTypeManager.insertGroupType(teamId, 'organization', 1) - - const fourHundredSmileys = '😀'.repeat(400) - const properties = {} - properties[fourHundredSmileys] = 'foo' - await manager.updateEventNamesAndProperties(teamId, fourHundredSmileys, properties) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: fourHundredSmileys, - is_numerical: false, - property_type: 'String', - }), - ]) - }) - - it('regression tests: >400 characters are ignored in property definitions', async () => { - await groupTypeManager.insertGroupType(teamId, 'project', 0) - await groupTypeManager.insertGroupType(teamId, 'organization', 1) - - const fourHundredAndOneSmileys = '😀'.repeat(401) - const properties = {} - properties[fourHundredAndOneSmileys] = 'foo' - - // Note that this shouldn't throw, the large values are just skipped. - await manager.updateEventNamesAndProperties(teamId, fourHundredAndOneSmileys, properties) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([]) - }) - - describe('auto-detection of property types', () => { - const randomInteger = () => Math.floor(Math.random() * 1000) + 1 - const randomString = () => [...Array(10)].map(() => (~~(Math.random() * 36)).toString(36)).join('') - - it('adds no type for objects', async () => { - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', { - anObjectProperty: { anything: randomInteger() }, - }) - - expect(manager.propertyDefinitionsCache.get(teamId)?.peek('1anObjectProperty')).toEqual( - NULL_AFTER_PROPERTY_TYPE_DETECTION - ) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: 'anObjectProperty', - is_numerical: false, - property_type: null, - }), - ]) - }) - - const boolTestCases = [ - true, - false, - 'true', - 'false', - 'True', - 'False', - 'TRUE', - 'FALSE', - ' true ', - ' false', - 'true ', - ] - boolTestCases.forEach((testcase) => { - it(`identifies ${testcase} as a boolean`, async () => { - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', { - some_bool: testcase, - }) - - expect(manager.propertyDefinitionsCache.get(teamId)?.peek('1some_bool')).toEqual('Boolean') - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: 'some_bool', - is_numerical: false, - property_type: 'Boolean', - }), - ]) - }) - }) - - // i.e. 
not using truthiness to detect whether something is boolean - const notBoolTestCases = [0, 1, '0', '1', 'yes', 'no', null, undefined, '', [], ' '] - notBoolTestCases.forEach((testcase) => { - it(`does not identify ${testcase} as a boolean`, async () => { - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', { - some_bool: testcase, - }) - - expect(manager.propertyDefinitionsCache.get(teamId)?.peek('1some_bool')).not.toEqual('Boolean') - }) - }) - - it('identifies a numeric type', async () => { - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', { - some_number: randomInteger(), - }) - - expect(manager.propertyDefinitionsCache.get(teamId)?.peek('1some_number')).toEqual('Numeric') - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: 'some_number', - is_numerical: true, - property_type: 'Numeric', - }), - ]) - }) - - it('identifies a numeric type sent as a string... as a string', async () => { - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', { - some_number: String(randomInteger()), - }) - - expect(manager.propertyDefinitionsCache.get(teamId)?.peek('1some_number')).toEqual('String') - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: 'some_number', - property_type: 'String', - }), - ]) - }) - - it('identifies a string type', async () => { - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', { - some_string: randomString(), - }) - - expect(manager.propertyDefinitionsCache.get(teamId)?.peek('1some_string')).toEqual('String') - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: 'some_string', - is_numerical: false, - property_type: 'String', - }), - ]) - }) - - // there are several cases that can be identified as timestamps - // and each might match with time or timestamp in the property key - // but won't match if neither is in it - const unixTimestampTestCases = [ - { - propertyKey: 'unix timestamp with fractional seconds as a number', - date: 1234567890.123, - expectedPropertyType: PropertyType.DateTime, - }, - { - propertyKey: 'unix timestamp with five decimal places of fractional seconds as a number', - date: 1234567890.12345, - expectedPropertyType: PropertyType.DateTime, - }, - { - propertyKey: 'unix timestamp as a number', - date: 1234567890, - expectedPropertyType: PropertyType.DateTime, - }, - { - propertyKey: 'unix timestamp with fractional seconds as a string', - date: '1234567890.123', - expectedPropertyType: PropertyType.String, - }, - { - propertyKey: 'unix timestamp with five decimal places of fractional seconds as a string', - date: '1234567890.12345', - expectedPropertyType: PropertyType.String, - }, - { - propertyKey: 'unix timestamp as a string', - date: '1234567890', - expectedPropertyType: PropertyType.String, - }, - { - propertyKey: 'unix timestamp in milliseconds as a number', - date: 1234567890123, - expectedPropertyType: PropertyType.DateTime, - }, - { - propertyKey: 'unix timestamp in milliseconds as a string', - date: '1234567890123', - expectedPropertyType: PropertyType.String, - }, - ].flatMap((testcase) => { - const toEdit = testcase - - const toMatchWithJustTimeInName = { - ...toEdit, - propertyKey: testcase.propertyKey.replace('timestamp', 'time'), - } - - const toNotMatch = { - 
...toEdit, - propertyKey: toEdit.propertyKey.replace('timestamp', 'as a string'), - expectedPropertyType: typeof toEdit.date === 'number' ? PropertyType.Numeric : PropertyType.String, - } - - return [testcase, toMatchWithJustTimeInName, toNotMatch] - }) - - unixTimestampTestCases.forEach((testcase) => { - it(`with key ${testcase.propertyKey} matches ${testcase.date} as ${testcase.expectedPropertyType}`, async () => { - const properties: Record<string, any> = {} - properties[testcase.propertyKey] = testcase.date - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', properties) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: testcase.propertyKey, - is_numerical: testcase.expectedPropertyType === PropertyType.Numeric, - property_type: testcase.expectedPropertyType, - }), - ]) - }) - }) - - // most datetimes can be identified by replacing the date parts with numbers - // RFC 822 formatted dates as it has a short name for the month instead of a two-digit number - const dateTimeFormatTestCases: { - propertyKey: string - date: string - }[] = Object.keys(dateTimePropertyTypeFormatPatterns).flatMap((patternEnum: string) => { - const patternDescription: string = - DateTimePropertyTypeFormat[patternEnum as keyof typeof DateTimePropertyTypeFormat] - if (patternDescription === 'rfc_822') { - return { - propertyKey: 'an_rfc_822_format_date', - date: 'Wed, 02 Oct 2002 15:00:00 +0200', - } - } else if (patternDescription === DateTimePropertyTypeFormat.ISO8601_DATE) { - return [ - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233056+00', - date: '2022-01-15T11:18:49.233056+00:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233056-00', - date: '2022-01-15T11:18:49.233056-00:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233056+04', - date: '2022-01-15T11:18:49.233056+04:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233056-04', - date: '2022-01-15T11:18:49.233056-04:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233056z', - date: '2022-01-15T11:18:49.233056z', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233+00:00', - date: '2022-01-15T11:18:49.233+00:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233-00:00', - date: '2022-01-15T11:18:49.233-00:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233+04:00', - date: '2022-01-15T11:18:49.233+04:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233-04:00', - date: '2022-01-15T11:18:49.233-04:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49.233z', - date: '2022-01-15T11:18:49.233z', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49+00:00', - date: '2022-01-15T11:18:49+00:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49-00:00', - date: '2022-01-15T11:18:49-00:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49+04:00', - date: '2022-01-15T11:18:49+04:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49-04:00', - date: '2022-01-15T11:18:49-04:00', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49z', - date: '2022-01-15T11:18:49z', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49+11', - date: '2022-01-15T11:18:49+11', - }, - { - propertyKey: 'an_iso_8601_format_date_2022-01-15T11:18:49+0530',
- date: '2022-01-15T11:18:49+0530', - }, - ] - } else { - const date = patternDescription - .replace('YYYY', '2021') - .replace('MM', '04') - .replace('DD', '01') - .replace('hh', '13') - .replace('mm', '01') - .replace('ss', '01') - - //iso timestamps can have fractional parts of seconds - if (date.includes('T')) { - return [ - { propertyKey: patternDescription, date }, - { propertyKey: patternDescription, date: date.replace('Z', '.243Z') }, - ] - } else { - return { propertyKey: patternDescription, date } - } - } - }) - - dateTimeFormatTestCases.forEach((testcase) => { - it(`matches ${testcase.date} as DateTime`, async () => { - const properties: Record<string, any> = {} - properties[testcase.propertyKey] = testcase.date - await manager.updateEventNamesAndProperties(teamId, 'another_test_event', properties) - - expect(await hub.db.fetchPropertyDefinitions(teamId)).toEqual([ - expect.objectContaining({ - id: expect.any(String), - team_id: teamId, - name: testcase.propertyKey, - is_numerical: false, - property_type: PropertyType.DateTime, - }), - ]) - }) - }) - - it('does identify type if the property was previously saved with no type', async () => { - await manager.db.postgres.query( - PostgresUse.COMMON_WRITE, - 'INSERT INTO posthog_propertydefinition (id, name, type, is_numerical, volume_30_day, query_usage_30_day, team_id, property_type) VALUES ($1, $2, $3, $4, NULL, NULL, $5, $6)', - [new UUIDT().toString(), 'a_timestamp', PropertyDefinitionTypeEnum.Event, false, teamId, null], - 'testTag' - ) - - await manager.updateEventNamesAndProperties(teamId, 'a_test_event', { - a_timestamp: 1234567890, - }) - - const results = await manager.db.postgres.query( - PostgresUse.COMMON_WRITE, - ` - SELECT property_type from posthog_propertydefinition - where name=$1 - `, - ['a_timestamp'], - 'queryForProperty' - ) - expect(results.rows[0]).toEqual({ property_type: 'DateTime' }) - }) - - it('does not replace property type if the property was previously saved with a different type', async () => { - await manager.db.postgres.query( - PostgresUse.COMMON_WRITE, - 'INSERT INTO posthog_propertydefinition (id, name, type, is_numerical, volume_30_day, query_usage_30_day, team_id, property_type) VALUES ($1, $2, $3, $4, NULL, NULL, $5, $6)', - [ - new UUIDT().toString(), - 'a_prop_with_type', - PropertyDefinitionTypeEnum.Event, - false, - teamId, - PropertyType.DateTime, - ], - 'testTag' - ) - - await manager.updateEventNamesAndProperties(teamId, 'a_test_event', { - a_prop_with_type: 1234567890, - }) - - const results = await manager.db.postgres.query( - PostgresUse.COMMON_WRITE, - ` - SELECT property_type from posthog_propertydefinition - where name=$1 - `, - ['a_prop_with_type'], - 'queryForProperty' - ) - expect(results.rows[0]).toEqual({ - property_type: PropertyType.DateTime, - }) - }) - - it('does not keep trying to set a property type when it cannot', async () => { - const postgresQuery = jest.spyOn(hub.db.postgres, 'query') - - const properties = { - a_prop_with_a_type_we_do_not_set: { a: 1234567890 }, - } - await manager.updateEventNamesAndProperties(teamId, 'a_test_event', properties) - - // 7 calls to DB to set up team manager and updateEventNamesAndProperties - expect(postgresQuery.mock.calls).toHaveLength(7) - - await manager.updateEventNamesAndProperties(teamId, 'a_test_event', properties) - - // no more calls to DB as everything is cached - expect(postgresQuery.mock.calls).toHaveLength(7) - }) - }) - }) -}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 813be57c694d7..79813cc6c2725 100644 ---
a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -19,9 +19,6 @@ patchedDependencies: path: patches/rrweb@2.0.0-alpha.13.patch dependencies: - '@ant-design/icons': - specifier: ^4.7.0 - version: 4.7.0(react-dom@18.2.0)(react@18.2.0) '@babel/runtime': specifier: ^7.24.0 version: 7.24.0 @@ -53,8 +50,8 @@ dependencies: specifier: 4.6.0 version: 4.6.0(monaco-editor@0.49.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/hogvm': - specifier: ^1.0.44 - version: 1.0.44(luxon@3.5.0) + specifier: ^1.0.47 + version: 1.0.47(luxon@3.5.0) '@posthog/icons': specifier: 0.8.1 version: 0.8.1(react-dom@18.2.0)(react@18.2.0) @@ -272,8 +269,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.160.3 - version: 1.160.3 + specifier: 1.161.1 + version: 1.161.1 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -374,7 +371,7 @@ dependencies: optionalDependencies: fsevents: specifier: ^2.3.2 - version: 2.3.2 + version: 2.3.3 devDependencies: '@babel/core': @@ -855,32 +852,6 @@ packages: '@jridgewell/gen-mapping': 0.1.1 '@jridgewell/trace-mapping': 0.3.17 - /@ant-design/colors@6.0.0: - resolution: {integrity: sha512-qAZRvPzfdWHtfameEGP2Qvuf838NhergR35o+EuVyB5XvSA98xod5r4utvi4TJ3ywmevm290g9nsCG5MryrdWQ==} - dependencies: - '@ctrl/tinycolor': 3.4.1 - dev: false - - /@ant-design/icons-svg@4.2.1: - resolution: {integrity: sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw==} - dev: false - - /@ant-design/icons@4.7.0(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-aoB4Z7JA431rt6d4u+8xcNPPCrdufSRMUOpxa1ab6mz1JCQZOEVolj2WVs/tDFmN62zzK30mNelEsprLYsSF3g==} - engines: {node: '>=8'} - peerDependencies: - react: '>=16.0.0' - react-dom: '>=16.0.0' - dependencies: - '@ant-design/colors': 6.0.0 - '@ant-design/icons-svg': 4.2.1 - '@babel/runtime': 7.24.0 - classnames: 2.3.2 - rc-util: 5.24.4(react-dom@18.2.0)(react@18.2.0) - react: 18.2.0 - react-dom: 18.2.0(react@18.2.0) - dev: false - /@aw-web-design/x-default-browser@1.4.126: resolution: {integrity: sha512-Xk1sIhyNC/esHGGVjL/niHLowM0csl/kFO5uawBy4IrWwy0o1G8LGt3jP6nmWGz+USxeeqbihAmp/oVZju6wug==} hasBin: true @@ -4142,11 +4113,6 @@ packages: postcss-selector-parser: 6.0.15 dev: false - /@ctrl/tinycolor@3.4.1: - resolution: {integrity: sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw==} - engines: {node: '>=10'} - dev: false - /@cypress/request@3.0.1: resolution: {integrity: sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==} engines: {node: '>= 6'} @@ -5414,8 +5380,8 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/hogvm@1.0.44(luxon@3.5.0): - resolution: {integrity: sha512-Ss7gTPyvPyviNipVQOqnsCa66IMmMf+DEg7iX/vQMcWDuFwvHNbdze1iwFVoXCjLci+h8SW2rOMPB0S5A2jJXg==} + /@posthog/hogvm@1.0.47(luxon@3.5.0): + resolution: {integrity: sha512-6zoMKH6fmO9evuhpd9dtkVi2GFk+dGsxcHe6l6mMo7yXd0Kwssmiwr8aLndYTsjdhRkVMi7iaoi/GLrh8F9ayQ==} peerDependencies: luxon: ^3.4.4 dependencies: @@ -13077,6 +13043,7 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] requiresBuild: true + dev: true optional: true /fsevents@2.3.3: @@ -15427,7 +15394,7 @@ packages: image-size: 0.5.5 make-dir: 2.1.0 mime: 1.6.0 - native-request: 1.1.0 + native-request: 1.1.2 source-map: 0.6.1 dev: true @@ -16163,8 +16130,8 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - 
/native-request@1.1.0: - resolution: {integrity: sha512-uZ5rQaeRn15XmpgE0xoPL8YWqcX90VtCFglYwAgkvKM5e8fog+vePLAhHxuuv/gRkrQxIeh5U3q9sMNUrENqWw==} + /native-request@1.1.2: + resolution: {integrity: sha512-/etjwrK0J4Ebbcnt35VMWnfiUX/B04uwGJxyJInagxDqf2z5drSt/lsOvEMWGYunz1kaLZAFrV4NDAbOoDKvAQ==} requiresBuild: true dev: true optional: true @@ -17707,8 +17674,8 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.160.3: - resolution: {integrity: sha512-mGvxOIlWPtdPx8EI0MQ81wNKlnH2K0n4RqwQOl044b34BCKiFVzZ7Hc7geMuZNaRAvCi5/5zyGeWHcAYZQxiMQ==} + /posthog-js@1.161.1: + resolution: {integrity: sha512-l897s6jjQUHfXDNjy6+CqIhRH/h5Up1Cr7PM2phq3Im+4zd66IJNJ7aSmMjP/HvQq/s16XO1LfGAGEPdbBr/xw==} dependencies: fflate: 0.4.8 preact: 10.23.2 @@ -18298,7 +18265,7 @@ packages: react: '>=15' dependencies: react: 18.2.0 - unlayer-types: 1.65.0 + unlayer-types: 1.77.0 dev: false /react-error-boundary@3.1.4(react@18.2.0): @@ -20832,8 +20799,8 @@ packages: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} - /unlayer-types@1.65.0: - resolution: {integrity: sha512-fIeh/TtUhQ16A0oW3mHkcDekvhIbZbN+h0qVgBuVxjGnYME/Ma3saFRO4eKJll0YNyalvb9MdmSz0nyTgr/1/w==} + /unlayer-types@1.77.0: + resolution: {integrity: sha512-/eIyHiGWO/EFb3GuD7FqD+jV9sXAj2FMhwn3QTVaio2FUz30RRAh8kQS+qNwPKIcLEZWKzfH41SBi3afudEVAA==} dev: false /unpipe@1.0.0: diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 2cc8f7519d039..c5bdd0205afe4 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -207,10 +207,14 @@ def get_decide(request: HttpRequest): capture_network_timing = True if team.capture_performance_opt_in else False capture_web_vitals = True if team.autocapture_web_vitals_opt_in else False + autocapture_web_vitals_allowed_metrics = None + if capture_web_vitals: + autocapture_web_vitals_allowed_metrics = team.autocapture_web_vitals_allowed_metrics response["capturePerformance"] = ( { "network_timing": capture_network_timing, "web_vitals": capture_web_vitals, + "web_vitals_allowed_metrics": autocapture_web_vitals_allowed_metrics, } if capture_network_timing or capture_web_vitals else False diff --git a/posthog/api/email_verification.py b/posthog/api/email_verification.py index e22a298fe44a0..83c12d1dfe1e9 100644 --- a/posthog/api/email_verification.py +++ b/posthog/api/email_verification.py @@ -14,7 +14,7 @@ def is_email_verification_disabled(user: User) -> bool: # using disabled here so that the default state (if no flag exists) is that verification defaults to ON. 
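(Aside on the decide.py change above: a sketch of how a client might consume the extended capturePerformance payload. The DecideResponse shape is inferred from the handler above; the default metric list is an assumption for illustration, not posthog-js's actual default.)

interface DecidePerformanceConfig {
    network_timing: boolean
    web_vitals: boolean
    web_vitals_allowed_metrics: string[] | null // null when no server-side restriction is set
}

interface DecideResponse {
    capturePerformance: false | DecidePerformanceConfig
}

const allowedWebVitalsMetrics = (decide: DecideResponse): string[] => {
    if (decide.capturePerformance === false || !decide.capturePerformance.web_vitals) {
        return [] // web vitals capture is off entirely
    }
    // A null allow-list means "no restriction"; the fallback set here is an assumption.
    return decide.capturePerformance.web_vitals_allowed_metrics ?? ['LCP', 'CLS', 'FCP', 'INP']
}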
return user.organization is not None and posthoganalytics.feature_enabled( VERIFICATION_DISABLED_FLAG, - user.organization.id, + str(user.organization.id), groups={"organization": str(user.organization.id)}, group_properties={"organization": {"id": str(user.organization.id)}}, ) diff --git a/posthog/api/error_tracking.py b/posthog/api/error_tracking.py index 7538c93d7248d..d8a9a5aeb68d1 100644 --- a/posthog/api/error_tracking.py +++ b/posthog/api/error_tracking.py @@ -13,7 +13,7 @@ class ErrorTrackingGroupSerializer(serializers.ModelSerializer): class Meta: model = ErrorTrackingGroup - fields = ["assignee"] + fields = ["assignee", "status"] class ErrorTrackingGroupViewSet(TeamAndOrgViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): diff --git a/posthog/api/insight.py b/posthog/api/insight.py index 0f3d6d8f6398a..1905a49ca8c2a 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -11,7 +11,7 @@ from django.utils.timezone import now from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.types import OpenApiTypes -from drf_spectacular.utils import OpenApiParameter, OpenApiResponse +from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema_view from loginas.utils import is_impersonated_session from prometheus_client import Counter from rest_framework import request, serializers, status, viewsets @@ -64,7 +64,11 @@ from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import ( conversion_to_query_based, ) -from posthog.hogql_queries.query_runner import execution_mode_from_refresh, shared_insights_execution_mode +from posthog.hogql_queries.query_runner import ( + ExecutionMode, + execution_mode_from_refresh, + shared_insights_execution_mode, +) from posthog.kafka_client.topics import KAFKA_METRICS_TIME_TO_SEE_DATA from posthog.models import DashboardTile, Filter, Insight, User from posthog.models.activity_logging.activity_log import ( @@ -632,6 +636,21 @@ def dashboard_tile_from_context(self, insight: Insight, dashboard: Optional[Dash return dashboard_tile +@extend_schema_view( + list=extend_schema( + parameters=[ + OpenApiParameter( + name="refresh", + enum=list(ExecutionMode), + default=ExecutionMode.CACHE_ONLY_NEVER_CALCULATE, + description=""" +Whether to refresh the retrieved insights and how aggressively. (The default `force_cache` value never refreshes.) +If an `_async` mode is chosen, this request kicks off a background query and returns immediately. +Background calculation can be tracked using the `query_status` response field.""", + ) + ] + ), +) class InsightViewSet( TeamAndOrgViewSetMixin, TaggedItemViewSetMixin, @@ -788,24 +807,19 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query parameters=[ OpenApiParameter( name="refresh", - type=OpenApiTypes.BOOL, + enum=list(ExecutionMode), + default=ExecutionMode.CACHE_ONLY_NEVER_CALCULATE, description=""" - The client can request that an insight be refreshed by setting the `refresh=true` parameter. - The server will then decide if the data should or not be refreshed based on a set of heuristics - meant to determine the staleness of cached data. The result will contain as `is_cached` field - that indicates whether the insight was actually refreshed or not through the request.""", +Whether to refresh the insight and how aggressively. (The default `force_cache` value never refreshes.) +If an `_async` mode is chosen, this request kicks off a background query and returns immediately. 
+Background calculation can be tracked using the `query_status` response field.""", ), OpenApiParameter( name="from_dashboard", type=OpenApiTypes.INT, description=""" -When loading an insight for a dashboard pass a `from_dashboard` query parameter containing the dashboard ID - -e.g. `"/api/projects/{team_id}/insights/{insight_id}?from_dashboard={dashboard_id}"` - -Insights can be added to more than one dashboard, this allows the insight to be loaded in the correct context. - -Using the correct cache and enriching the response with dashboard specific config (e.g. layouts or colors)""", +Only if loading an insight in the context of a dashboard: The relevant dashboard's ID. +When set, the specified dashboard's filters and date range override will be applied.""", ), ], ) diff --git a/posthog/api/team.py b/posthog/api/team.py index 9f4e04e007eaf..00584574186ec 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -96,6 +96,7 @@ class Meta: "autocapture_opt_out", "autocapture_exceptions_opt_in", "autocapture_web_vitals_opt_in", + "autocapture_web_vitals_allowed_metrics", "autocapture_exceptions_errors_to_ignore", "capture_performance_opt_in", "capture_console_log_opt_in", @@ -143,6 +144,7 @@ class Meta: "autocapture_opt_out", "autocapture_exceptions_opt_in", "autocapture_web_vitals_opt_in", + "autocapture_web_vitals_allowed_metrics", "autocapture_exceptions_errors_to_ignore", "capture_console_log_opt_in", "capture_performance_opt_in", diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index a9331f33b4897..e31bb6984783f 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -49,6 +49,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -137,6 +138,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -457,6 +459,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr index 77f85cdcaeecb..ebf0634a89a6e 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -49,6 +49,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -105,6 +106,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + 
"posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -358,6 +360,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index 1b4461960e2d4..d375d41ab6314 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -129,6 +129,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -282,6 +283,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -403,6 +405,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -499,6 +502,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -650,6 +654,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -746,6 +751,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -904,6 +910,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1016,6 +1023,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", 
"posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index 90eea0c4bf854..ca93bf4cc878d 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -17,6 +17,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -158,6 +159,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index 498ec7a5bcd92..b270c6665a3ea 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -49,6 +49,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index e92a57e4e71b2..293f89b99e3f8 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -457,6 +457,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -663,6 +664,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1028,6 +1030,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1168,6 +1171,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1465,6 +1469,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", 
"posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1581,6 +1586,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1651,6 +1657,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1714,6 +1721,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 75bebbeb971f4..dff5dc4f49e93 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -688,6 +688,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -744,6 +745,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -876,6 +878,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1119,6 +1122,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1270,6 +1274,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1407,6 +1412,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ 
-1523,6 +1529,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1674,6 +1681,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1765,6 +1773,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1855,6 +1864,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1918,6 +1928,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index d9d63f0ee948b..f25c97da54780 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -98,6 +98,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -209,6 +210,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -300,6 +302,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -363,6 +366,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -504,6 +508,7 @@ "posthog_team"."ingested_event", 
"posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -567,6 +572,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -658,6 +664,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -721,6 +728,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -876,6 +884,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -962,6 +971,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1401,6 +1411,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2093,6 +2104,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr index a593c0073c526..2e4c27a3fa1ba 100644 --- a/posthog/api/test/__snapshots__/test_preflight.ambr +++ b/posthog/api/test/__snapshots__/test_preflight.ambr @@ -49,6 +49,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index 22cc1302c85cc..f99886b236db2 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr 
+++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -176,6 +176,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/batch_exports/operations.py b/posthog/api/test/batch_exports/operations.py index 5ac814deab1e2..20f7d2761e2bf 100644 --- a/posthog/api/test/batch_exports/operations.py +++ b/posthog/api/test/batch_exports/operations.py @@ -1,5 +1,6 @@ from django.test.client import Client as TestClient from rest_framework import status +from posthog.models.utils import UUIDT def create_batch_export(client: TestClient, team_id: int, batch_export_data: dict | str): @@ -16,17 +17,17 @@ def create_batch_export_ok(client: TestClient, team_id: int, batch_export_data: return response.json() -def pause_batch_export(client: TestClient, team_id: int, batch_export_id: int): +def pause_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT): return client.post(f"/api/projects/{team_id}/batch_exports/{batch_export_id}/pause") -def pause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): +def pause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT): response = pause_batch_export(client, team_id, batch_export_id) assert response.status_code == status.HTTP_200_OK, response.json() return response.json() -def unpause_batch_export(client: TestClient, team_id: int, batch_export_id: int, backfill: bool = False): +def unpause_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT, backfill: bool = False): return client.post( f"/api/projects/{team_id}/batch_exports/{batch_export_id}/unpause", {"backfill": backfill}, @@ -34,17 +35,17 @@ def unpause_batch_export(client: TestClient, team_id: int, batch_export_id: int, ) -def unpause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int, backfill: bool = False): +def unpause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT, backfill: bool = False): response = unpause_batch_export(client, team_id, batch_export_id, backfill) assert response.status_code == status.HTTP_200_OK, response.json() return response.json() -def get_batch_export(client: TestClient, team_id: int, batch_export_id: int): +def get_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT): return client.get(f"/api/projects/{team_id}/batch_exports/{batch_export_id}") -def get_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): +def get_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT): response = get_batch_export(client, team_id, batch_export_id) assert response.status_code == status.HTTP_200_OK, response.json() return response.json() @@ -63,11 +64,11 @@ def get_batch_export_runs_ok(client: TestClient, team_id: int, batch_export_id: return response.json() -def delete_batch_export(client: TestClient, team_id: int, batch_export_id: int): +def delete_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT): return client.delete(f"/api/projects/{team_id}/batch_exports/{batch_export_id}") -def delete_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): +def delete_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT): response = delete_batch_export(client, team_id, 
batch_export_id) assert response.status_code == status.HTTP_204_NO_CONTENT, response return response diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index ba9ebb921549a..846901c63d5bc 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -49,6 +49,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -183,6 +184,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -287,6 +289,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -457,6 +460,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -667,6 +671,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -853,6 +858,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1032,6 +1038,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1313,6 +1320,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1376,6 +1384,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ 
-1542,6 +1551,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1598,6 +1608,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1730,6 +1741,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1966,6 +1978,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2105,6 +2118,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2261,6 +2275,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2352,6 +2367,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2442,6 +2458,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2505,6 +2522,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2600,6 +2618,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2695,6 +2714,7 @@ "posthog_team"."ingested_event", 
"posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2830,6 +2850,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2920,6 +2941,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2983,6 +3005,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3046,6 +3069,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3178,6 +3202,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3398,6 +3423,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3502,6 +3528,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3704,6 +3731,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3882,6 +3910,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4068,6 +4097,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", 
"posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4247,6 +4277,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4394,6 +4425,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4667,6 +4699,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4783,6 +4816,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4958,6 +4992,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5094,6 +5129,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5196,6 +5232,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5286,6 +5323,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5349,6 +5387,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5412,6 +5451,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + 
"posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5544,6 +5584,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5633,6 +5674,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5696,6 +5738,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5828,6 +5871,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6052,6 +6096,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6164,6 +6209,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6255,6 +6301,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6359,6 +6406,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6422,6 +6470,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6485,6 +6534,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", 
"posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6617,6 +6667,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6836,6 +6887,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6948,6 +7000,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7039,6 +7092,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7102,6 +7156,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7192,6 +7247,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7255,6 +7311,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7318,6 +7375,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7450,6 +7508,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7676,6 +7735,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", 
"posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7788,6 +7848,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -7922,6 +7983,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -8026,6 +8088,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -8196,6 +8259,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -8375,6 +8439,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -8582,6 +8647,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -8677,6 +8743,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -8852,6 +8919,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -9042,6 +9110,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -9158,6 +9227,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", 
"posthog_team"."session_recording_opt_in", @@ -9333,6 +9403,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -9472,6 +9543,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -9696,6 +9768,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index ef58bf4b74489..a2c541d95986d 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -49,6 +49,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -137,6 +138,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -409,6 +411,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -509,6 +512,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index 0395cbce3717d..4d680c27e0e56 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -189,7 +189,10 @@ def test_user_console_log_opt_in(self, *args): def test_user_performance_opt_in(self, *args): # :TRICKY: Test for regression around caching response = self._post_decide().json() - self.assertEqual(response["capturePerformance"], {"network_timing": True, "web_vitals": False}) + self.assertEqual( + response["capturePerformance"], + {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + ) self._update_team({"capture_performance_opt_in": False}) @@ -376,14 +379,33 @@ def test_exception_autocapture_opt_in(self, *args): 
def test_web_vitals_autocapture_opt_in(self, *args): response = self._post_decide().json() - self.assertEqual(response["capturePerformance"], {"web_vitals": False, "network_timing": True}) + self.assertEqual( + response["capturePerformance"], + {"web_vitals": False, "network_timing": True, "web_vitals_allowed_metrics": None}, + ) self._update_team({"autocapture_web_vitals_opt_in": True}) response = self._post_decide().json() self.assertEqual( response["capturePerformance"], - {"web_vitals": True, "network_timing": True}, + {"web_vitals": True, "network_timing": True, "web_vitals_allowed_metrics": None}, + ) + + def test_web_vitals_autocapture_allowed_metrics(self, *args): + response = self._post_decide().json() + self.assertEqual( + response["capturePerformance"], + {"web_vitals": False, "network_timing": True, "web_vitals_allowed_metrics": None}, + ) + + self._update_team({"autocapture_web_vitals_opt_in": True}) + self._update_team({"autocapture_web_vitals_allowed_metrics": ["CLS", "FCP"]}) + + response = self._post_decide().json() + self.assertEqual( + response["capturePerformance"], + {"web_vitals": True, "network_timing": True, "web_vitals_allowed_metrics": ["CLS", "FCP"]}, ) def test_user_session_recording_opt_in_wildcard_domain(self, *args): @@ -2848,7 +2870,10 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): ) self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) self.assertEqual(response["siteApps"], []) - self.assertEqual(response["capturePerformance"], {"network_timing": True, "web_vitals": False}) + self.assertEqual( + response["capturePerformance"], + {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + ) self.assertEqual(response["featureFlags"], {}) self.assertEqual( response["autocaptureExceptions"], @@ -2873,7 +2898,10 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): ) self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) self.assertEqual(response["siteApps"], []) - self.assertEqual(response["capturePerformance"], {"network_timing": True, "web_vitals": False}) + self.assertEqual( + response["capturePerformance"], + {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + ) self.assertEqual( response["autocaptureExceptions"], {"endpoint": "/e/"}, @@ -3665,7 +3693,10 @@ def test_decide_doesnt_error_out_when_database_is_down_and_database_check_isnt_c ) self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) self.assertEqual(response["siteApps"], []) - self.assertEqual(response["capturePerformance"], {"network_timing": True, "web_vitals": False}) + self.assertEqual( + response["capturePerformance"], + {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + ) self.assertEqual(response["featureFlags"], {"no-props": True}) self.assertEqual(response["errorsWhileComputingFlags"], True) diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index 78339bd3f30c2..e5a85099efd08 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -11,8 +11,10 @@ from posthog.models.utils import UUIDT from posthog.schema import ( CachedEventsQueryResponse, + DataWarehouseNode, EventPropertyFilter, EventsQuery, + FunnelsQuery, HogQLPropertyFilter, HogQLQuery, PersonPropertyFilter, @@ -731,6 +733,39 @@ def test_invalid_query_kind(self): api_response.content, ) + def test_funnel_query_with_data_warehouse_node_temporarily_raises(self): + # As of September 2024, funnels 
don't support data warehouse tables YET, so we want a helpful error message
+        api_response = self.client.post(
+            f"/api/projects/{self.team.id}/query/",
+            {
+                "query": FunnelsQuery(
+                    series=[
+                        DataWarehouseNode(
+                            id="xyz",
+                            table_name="xyz",
+                            id_field="id",
+                            distinct_id_field="customer_email",
+                            timestamp_field="created",
+                        ),
+                        DataWarehouseNode(
+                            id="abc",
+                            table_name="abc",
+                            id_field="id",
+                            distinct_id_field="customer_email",
+                            timestamp_field="timestamp",
+                        ),
+                    ],
+                ).model_dump()
+            },
+        )
+        self.assertEqual(api_response.status_code, 400)
+        self.assertDictEqual(
+            api_response.json(),
+            self.validation_error_response(
+                "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step."
+            ),
+        )
+
     def test_missing_query(self):
         api_response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": {}})
         self.assertEqual(api_response.status_code, 400)
diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py
index ca1b3fcea137c..4edf9ca937ca8 100644
--- a/posthog/cdp/templates/__init__.py
+++ b/posthog/cdp/templates/__init__.py
@@ -2,8 +2,8 @@
 from .slack.template_slack import template as slack
 from .hubspot.template_hubspot import template as hubspot
 from .customerio.template_customerio import template as customerio, TemplateCustomerioMigrator
-from .intercom.template_intercom import template as intercom
-from .sendgrid.template_sendgrid import template as sendgrid
+from .intercom.template_intercom import template as intercom, TemplateIntercomMigrator
+from .sendgrid.template_sendgrid import template as sendgrid, TemplateSendGridMigrator
 from .clearbit.template_clearbit import template as clearbit
 from .posthog.template_posthog import template as posthog
 from .aws_kinesis.template_aws_kinesis import template as aws_kinesis
@@ -14,6 +14,9 @@
 )
 from .zapier.template_zapier import template as zapier
 from .mailgun.template_mailgun import template_mailgun_send_email as mailgun
+from .avo.template_avo import template as avo
+from .loops.template_loops import template as loops
+from .rudderstack.template_rudderstack import template as rudderstack


 HOG_FUNCTION_TEMPLATES = [
@@ -32,6 +35,9 @@
     mailjet_update_contact_list,
     clearbit,
     mailgun,
+    loops,
+    rudderstack,
+    avo,
 ]


@@ -39,6 +45,8 @@
 HOG_FUNCTION_MIGRATORS = {
     TemplateCustomerioMigrator.plugin_url: TemplateCustomerioMigrator,
+    TemplateIntercomMigrator.plugin_url: TemplateIntercomMigrator,
+    TemplateSendGridMigrator.plugin_url: TemplateSendGridMigrator,
 }

 __all__ = ["HOG_FUNCTION_TEMPLATES", "HOG_FUNCTION_TEMPLATES_BY_ID"]
diff --git a/posthog/cdp/templates/avo/template_avo.py b/posthog/cdp/templates/avo/template_avo.py
new file mode 100644
index 0000000000000..98997294e63e8
--- /dev/null
+++ b/posthog/cdp/templates/avo/template_avo.py
@@ -0,0 +1,126 @@
+from posthog.cdp.templates.hog_function_template import HogFunctionTemplate
+
+
+template: HogFunctionTemplate = HogFunctionTemplate(
+    status="beta",
+    id="template-avo",
+    name="Send events to Avo",
+    description="Send events to Avo",
+    icon_url="/static/services/avo.png",
+    hog="""
+if (empty(inputs.apiKey) or empty(inputs.environment)) {
+    print('API Key and environment have to be set. Skipping...')
+    return
+}
+
+let avoEvent := {
+    'apiKey': inputs.apiKey,
+    'env': inputs.environment,
+    'appName': inputs.appName,
+    'sessionId': event.properties.$session_id ??
generateUUIDv4(), + 'createdAt': toString(toDateTime(toUnixTimestamp(now()))), + 'avoFunction': false, + 'eventId': null, + 'eventHash': null, + 'appVersion': '1.0.0', + 'libVersion': '1.0.0', + 'libPlatform': 'node', + 'trackingId': '', + 'samplingRate': 1, + 'type': 'event', + 'eventName': event.name, + 'messageId': event.uuid, + 'eventProperties': [] +} + +fn getPropValueType(propValue) { + let propType := typeof(propValue) + if (propValue == null) { + return 'null' + } else if (propType == 'string') { + return 'string' + } else if (propType == 'integer') { + return 'int' + } else if (propType == 'float') { + return 'float' + } else if (propType == 'boolean') { + return 'boolean' + } else if (propType == 'object') { + return 'object' + } else if (propType == 'array') { + return 'list' + } else { + return propType + } +} + +for (let key, value in event.properties) { + let excludeProperties := arrayMap(x -> trim(x), splitByString(',', inputs.excludeProperties)) + let includeProperties := arrayMap(x -> trim(x), splitByString(',', inputs.includeProperties)) + let isExcluded := has(excludeProperties, key) + let isIncluded := includeProperties[1] == '' or has(includeProperties, key) + + if (not (key like '$%' or isExcluded or not isIncluded)) { + avoEvent.eventProperties := arrayPushBack(avoEvent.eventProperties, { 'propertyName': key, 'propertyType': getPropValueType(value) }) + } +} + +fetch('https://api.avo.app/inspector/posthog/v1/track', { + 'method': 'POST', + 'headers': { + 'env': inputs.environment, + 'api-key': inputs.apiKey, + 'content-type': 'application/json', + 'accept': 'application/json', + }, + 'body': [avoEvent] +}) +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Avo API Key", + "description": "Avo source API key", + "default": "", + "secret": True, + "required": True, + }, + { + "key": "environment", + "type": "string", + "label": "Environment", + "description": "Environment name", + "default": "dev", + "secret": False, + "required": False, + }, + { + "key": "appName", + "type": "string", + "label": "App name", + "description": "App name", + "default": "PostHog", + "secret": False, + "required": False, + }, + { + "key": "excludeProperties", + "type": "string", + "label": "Properties to exclude", + "description": "Comma-separated list of event properties that will not be sent to Avo.", + "default": "", + "secret": False, + "required": False, + }, + { + "key": "includeProperties", + "type": "string", + "label": "Properties to include", + "description": "Comma separated list of event properties to send to Avo (will send all if left empty).", + "default": "", + "secret": False, + "required": False, + }, + ], +) diff --git a/posthog/cdp/templates/avo/test_template_avo.py b/posthog/cdp/templates/avo/test_template_avo.py new file mode 100644 index 0000000000000..f62e20f124a91 --- /dev/null +++ b/posthog/cdp/templates/avo/test_template_avo.py @@ -0,0 +1,140 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.avo.template_avo import template as template_avo + + +class TestTemplateAvo(BaseHogFunctionTemplateTest): + template = template_avo + + def _inputs(self, **kwargs): + inputs = { + "apiKey": "NnBd7B55ZXC6o0Kh20pE", + "environment": "dev", + "appName": "PostHog", + "excludeProperties": "", + "includeProperties": "", + } + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function( + inputs=self._inputs(), + globals={ + 
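Before the fixture continues, a plain-Python restatement of the property-filtering rule implemented in the Hog template above, which test_property_filters further below exercises: exclusion always wins, an empty include list means every property is sent, and `$`-prefixed internal properties are always skipped. This is an illustrative re-implementation, not code from the diff:

def filtered_property_names(properties: dict, exclude_csv: str, include_csv: str) -> list[str]:
    excluded = [p.strip() for p in exclude_csv.split(",")]
    included = [p.strip() for p in include_csv.split(",")]
    include_all = included[0] == ""  # an empty input disables the include filter
    return [
        key
        for key in properties
        if not key.startswith("$")  # PostHog-internal properties are dropped
        and key not in excluded
        and (include_all or key in included)
    ]

assert filtered_property_names({"name": 1, "company": 2, "job": 3}, "name", "") == ["company", "job"]
assert filtered_property_names({"name": 1, "company": 2, "job": 3}, "", "name,company") == ["name", "company"]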
"event": { + "uuid": "0191c693-d93b-7516-b1e3-64ec33c96464", + "distinct_id": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", + "name": "sign up", + "properties": { + "distinct_id": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", + "token": "phc_ex7Mnvi4DqeB6xSQoXU1UVPzAmUIpicMFKELQXGGTYQO", + "bob": {"name": "bob"}, + "age": 99, + "name": "bob", + "items": ["apple", "stick"], + "job": True, + "noop": None, + "test": 1.4, + }, + }, + "person": { + "properties": {"email": "max@posthog.com", "name": "Max", "company": "PostHog"}, + }, + }, + ) + + res = self.get_mock_fetch_calls()[0] + res[1]["body"][0]["sessionId"] = "4d4454b4-31bb-4b13-8167-4ec76a0f49b6" + res[1]["body"][0]["createdAt"] = "2024-09-06T09:04:28.324Z" + assert res == snapshot( + ( + "https://api.avo.app/inspector/posthog/v1/track", + { + "method": "POST", + "headers": { + "env": "dev", + "api-key": "NnBd7B55ZXC6o0Kh20pE", + "content-type": "application/json", + "accept": "application/json", + }, + "body": [ + { + "apiKey": "NnBd7B55ZXC6o0Kh20pE", + "env": "dev", + "appName": "PostHog", + "sessionId": "4d4454b4-31bb-4b13-8167-4ec76a0f49b6", + "createdAt": "2024-09-06T09:04:28.324Z", + "avoFunction": False, + "eventId": None, + "eventHash": None, + "appVersion": "1.0.0", + "libVersion": "1.0.0", + "libPlatform": "node", + "trackingId": "", + "samplingRate": 1, + "type": "event", + "eventName": "sign up", + "messageId": "0191c693-d93b-7516-b1e3-64ec33c96464", + "eventProperties": [ + {"propertyName": "distinct_id", "propertyType": "string"}, + {"propertyName": "token", "propertyType": "string"}, + {"propertyName": "bob", "propertyType": "object"}, + {"propertyName": "age", "propertyType": "int"}, + {"propertyName": "name", "propertyType": "string"}, + {"propertyName": "items", "propertyType": "list"}, + {"propertyName": "job", "propertyType": "boolean"}, + {"propertyName": "noop", "propertyType": "null"}, + {"propertyName": "test", "propertyType": "float"}, + ], + } + ], + }, + ) + ) + + def test_automatic_type_mapping(self): + for property_value, expected_type in [ + # (None, "null"), + ("Bob", "string"), + (99, "int"), + (1.4, "float"), + (True, "boolean"), + ({"name": "Bob"}, "object"), + ([1, 2, 3], "list"), + ]: + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"name": "sign up", "properties": {"test": property_value}}, + }, + ) + + res = self.get_mock_fetch_calls()[0] + assert res[1]["body"][0]["eventProperties"][0]["propertyType"] == expected_type + + def test_property_filters(self): + # [excludeProperties, includeProperties], expected properties array + for filters, expected_result in [ + [["name", ""], ["company", "job"]], + [[" name ", ""], ["company", "job"]], + [["name", "name"], []], + [["", "name,company"], ["name", "company"]], + ]: + self.run_function( + inputs={ + "apiKey": "NnBd7B55ZXC6o0Kh20pE", + "environment": "dev", + "appName": "PostHog", + "excludeProperties": filters[0], + "includeProperties": filters[1], + }, + globals={ + "event": { + "name": "sign up", + "properties": {"name": "Max", "company": "PostHog", "job": "Product Engineer"}, + }, + }, + ) + + res = self.get_mock_fetch_calls()[0][1]["body"][0]["eventProperties"] + assert [item["propertyName"] for item in res] == expected_result diff --git a/posthog/cdp/templates/intercom/template_intercom.py b/posthog/cdp/templates/intercom/template_intercom.py index a8b1bf3d1aed9..fd97f2605759f 100644 --- a/posthog/cdp/templates/intercom/template_intercom.py +++ b/posthog/cdp/templates/intercom/template_intercom.py @@ -1,4 +1,6 @@ -from 
posthog.cdp.templates.hog_function_template import HogFunctionTemplate +from copy import deepcopy +import dataclasses +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate, HogFunctionTemplateMigrator template: HogFunctionTemplate = HogFunctionTemplate( @@ -8,19 +10,15 @@ description="Send events and contact information to Intercom", icon_url="/static/services/intercom.png", hog=""" -let accessToken := inputs.access_token -let host := inputs.host -let email := inputs.email - -if (empty(email)) { +if (empty(inputs.email)) { print('`email` input is empty. Skipping.') return } -let res := fetch(f'https://{host}/events', { +let res := fetch(f'https://{inputs.host}/events', { 'method': 'POST', 'headers': { - 'Authorization': f'Bearer {accessToken}', + 'Authorization': f'Bearer {inputs.access_token}', 'Content-Type': 'application/json', 'Accept': 'application/json' }, @@ -89,3 +87,47 @@ "filter_test_accounts": True, }, ) + + +class TemplateIntercomMigrator(HogFunctionTemplateMigrator): + plugin_url = "https://github.com/PostHog/posthog-intercom-plugin" + + @classmethod + def migrate(cls, obj): + hf = deepcopy(dataclasses.asdict(template)) + + useEuropeanDataStorage = obj.config.get("useEuropeanDataStorage", "No") + intercomApiKey = obj.config.get("intercomApiKey", "") + triggeringEvents = obj.config.get("triggeringEvents", "$identify") + ignoredEmailDomains = obj.config.get("ignoredEmailDomains", "") + + hf["filters"] = {} + + events_to_filter = [event.strip() for event in triggeringEvents.split(",") if event.strip()] + domains_to_filter = [domain.strip() for domain in ignoredEmailDomains.split(",") if domain.strip()] + + if domains_to_filter: + hf["filters"]["properties"] = [ + { + "key": "email", + "value": domain, + "operator": "not_icontains", + "type": "person", + } + for domain in domains_to_filter + ] + + if events_to_filter: + hf["filters"]["events"] = [ + {"id": event, "name": event, "type": "events", "order": 0} for event in events_to_filter + ] + + hf["inputs"] = { + "access_token": {"value": intercomApiKey}, + "host": {"value": "api.eu.intercom.com"} + if useEuropeanDataStorage == "Yes" + else {"value": "api.intercom.io"}, + "email": {"value": "{person.properties.email}"}, + } + + return hf diff --git a/posthog/cdp/templates/intercom/test_template_intercom.py b/posthog/cdp/templates/intercom/test_template_intercom.py index 9d9ec4aedb39a..f76f48f1d5485 100644 --- a/posthog/cdp/templates/intercom/test_template_intercom.py +++ b/posthog/cdp/templates/intercom/test_template_intercom.py @@ -1,5 +1,8 @@ +from inline_snapshot import snapshot from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest -from posthog.cdp.templates.intercom.template_intercom import template as template_intercom +from posthog.cdp.templates.intercom.template_intercom import template as template_intercom, TemplateIntercomMigrator +from posthog.models.plugin import PluginConfig +from posthog.test.base import BaseTest class TestTemplateIntercom(BaseHogFunctionTemplateTest): @@ -77,3 +80,83 @@ def test_logs_other_errors(self): }, ) ] + + +class TestTemplateMigration(BaseTest): + def get_plugin_config(self, config: dict): + _config = { + "intercomApiKey": "INTERCOM_API_KEY", + "triggeringEvents": "$identify", + "ignoredEmailDomains": "", + "useEuropeanDataStorage": "No", + } + + _config.update(config) + return PluginConfig(enabled=True, order=0, config=_config) + + def test_full_function(self): + obj = self.get_plugin_config({}) + + template = TemplateIntercomMigrator.migrate(obj) + 
assert template["inputs"] == snapshot( + { + "access_token": {"value": "INTERCOM_API_KEY"}, + "host": {"value": "api.intercom.io"}, + "email": {"value": "{person.properties.email}"}, + } + ) + assert template["filters"] == snapshot( + {"events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}]} + ) + + def test_eu_host(self): + obj = self.get_plugin_config( + { + "useEuropeanDataStorage": "Yes", + } + ) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "access_token": {"value": "INTERCOM_API_KEY"}, + "host": {"value": "api.eu.intercom.com"}, + "email": {"value": "{person.properties.email}"}, + } + ) + + def test_triggering_events(self): + obj = self.get_plugin_config( + { + "triggeringEvents": "$identify,$pageview, custom event, ", + } + ) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["filters"] == snapshot( + { + "events": [ + {"id": "$identify", "name": "$identify", "type": "events", "order": 0}, + {"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}, + {"id": "custom event", "name": "custom event", "type": "events", "order": 0}, + ] + } + ) + + def test_ignore_domains(self): + obj = self.get_plugin_config( + { + "ignoredEmailDomains": "test.com, other.com, ", + } + ) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["filters"] == snapshot( + { + "properties": [ + {"key": "email", "value": "test.com", "operator": "not_icontains", "type": "person"}, + {"key": "email", "value": "other.com", "operator": "not_icontains", "type": "person"}, + ], + "events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}], + } + ) diff --git a/posthog/cdp/templates/loops/template_loops.py b/posthog/cdp/templates/loops/template_loops.py new file mode 100644 index 0000000000000..01230f7727328 --- /dev/null +++ b/posthog/cdp/templates/loops/template_loops.py @@ -0,0 +1,49 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + + +template: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + id="template-loops", + name="Send events to Loops", + description="Passes PostHog events to Loops.so", + icon_url="/static/services/loops.png", + hog=""" +let apiKey := inputs.apiKey + +let payload := { + 'userId': event.distinct_id, + 'eventName': event.name == '$set' ? 
'$identify' : event.name, + 'email': person.properties.email +} +for (let key, value in person.properties) { + payload[key] := value +} +fetch('https://app.loops.so/api/v1/events/send', { + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {apiKey}', + }, + 'body': payload +}) +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Loops API Key", + "description": "Loops API Key", + "default": "", + "secret": True, + "required": True, + } + ], + filters={ + "events": [ + {"id": "$identify", "name": "$identify", "type": "events", "order": 0}, + {"id": "$set", "name": "$set", "type": "events", "order": 1}, + ], + "actions": [], + "filter_test_accounts": True, + }, +) diff --git a/posthog/cdp/templates/loops/test_template_loops.py b/posthog/cdp/templates/loops/test_template_loops.py new file mode 100644 index 0000000000000..c6d48b5228b14 --- /dev/null +++ b/posthog/cdp/templates/loops/test_template_loops.py @@ -0,0 +1,61 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.loops.template_loops import template as template_loops + + +class TestTemplateLoops(BaseHogFunctionTemplateTest): + template = template_loops + + def _inputs(self, **kwargs): + inputs = {"apiKey": "1cac089e00a708680bdb1ed9f082d5bf"} + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"distinct_id": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", "name": "$pageview"}, + "person": { + "properties": {"email": "max@posthog.com", "name": "Max", "company": "PostHog"}, + }, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://app.loops.so/api/v1/events/send", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Bearer 1cac089e00a708680bdb1ed9f082d5bf", + }, + "body": { + "userId": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", + "eventName": "$pageview", + "email": "max@posthog.com", + "name": "Max", + "company": "PostHog", + }, + }, + ) + ) + + def test_automatic_action_mapping(self): + for event_name, expected_action in [ + ("$identify", "$identify"), + ("$set", "$identify"), + ("$pageview", "$pageview"), + ("$create_alias", "$create_alias"), + ("$autocapture", "$autocapture"), + ("custom", "custom"), + ]: + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"name": event_name, "properties": {"url": "https://example.com", "$browser": "Chrome"}}, + }, + ) + + assert self.get_mock_fetch_calls()[0][1]["body"]["eventName"] == expected_action diff --git a/posthog/cdp/templates/rudderstack/template_rudderstack.py b/posthog/cdp/templates/rudderstack/template_rudderstack.py new file mode 100644 index 0000000000000..efbfafbaeaa9f --- /dev/null +++ b/posthog/cdp/templates/rudderstack/template_rudderstack.py @@ -0,0 +1,126 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + + +template: HogFunctionTemplate = HogFunctionTemplate( + status="alpha", + id="template-rudderstack", + name="Send data to RudderStack", + description="Send data to RudderStack", + icon_url="/static/services/rudderstack.png", + hog=""" +fn getPayload() { + let rudderPayload := { + 'context': { + 'app': { + 'name': 'PostHogPlugin', + }, + 'os': { + 'name': event.properties.$os + }, + 'browser': event.properties.$browser, + 'browser_version': event.properties.$browser_version, + 'page': { + 'host': 
event.properties.$host, + 'url': event.properties.$current_url, + 'path': event.properties.$pathname, + 'referrer': event.properties.$referrer, + 'initial_referrer': event.properties.$initial_referrer, + 'referring_domain': event.properties.$referring_domain, + 'initial_referring_domain': event.properties.$initial_referring_domain, + }, + 'screen': { + 'height': event.properties.$screen_height, + 'width': event.properties.$screen_width, + }, + 'library': { + 'name': event.properties.$lib, + 'version': event.properties.$lib_version, + }, + 'ip': event.$ip, + 'active_feature_flags': event.properties.$active_feature_flags, + 'token': event.properties.token + }, + 'channel': 's2s', + 'messageId': event.uuid, + 'originalTimestamp': event.timestamp, + 'userId': inputs.identifier, + 'anonymousId': event.properties.$anon_distinct_id ?? event.properties.$device_id ?? event.properties.distinct_id, + 'type': 'track', + 'properties': {}, + } + + if (event.name in ('$identify', '$set')) { + rudderPayload.type := 'identify' + rudderPayload.context.trait := event.properties.$set + rudderPayload.traits := event.properties.$set + } else if (event.name == '$create_alias') { + rudderPayload.type := 'alias' + rudderPayload.userId := event.properties.alias + rudderPayload.previousId := event.distinct_id + } else if (event.name == '$pageview') { + rudderPayload.type := 'page' + rudderPayload.name := event.properties.name + rudderPayload.properties.host := event.properties.$host + rudderPayload.properties.url := event.properties.$current_url + rudderPayload.properties.path := event.properties.$pathname + rudderPayload.properties.referrer := event.properties.$referrer + rudderPayload.properties.initial_referrer := event.properties.$initial_referrer + rudderPayload.properties.referring_domain := event.properties.$referring_domain + rudderPayload.properties.initial_referring_domain := event.properties.$initial_referring_domain + } else if (event.name == '$autocapture') { + rudderPayload.type := 'track' + rudderPayload.event := event.properties.$event_type + } else { + rudderPayload.type := 'track' + rudderPayload.event := event.name + } + + for (let key, value in event.properties) { + if (value != null and not key like '$%') { + rudderPayload.properties[key] := value + } + } + + return { + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'Authorization': f'Basic {base64Encode(f'{inputs.token}:')}', + }, + 'body': { + 'batch': [rudderPayload], + 'sentAt': now() + } + } +} + +fetch(f'{inputs.host}/v1/batch', getPayload()) +""".strip(), + inputs_schema=[ + { + "key": "host", + "type": "string", + "label": "Rudderstack host", + "description": "The destination of the Rudderstack instance", + "default": "https://hosted.rudderlabs.com", + "secret": False, + "required": True, + }, + { + "key": "token", + "type": "string", + "label": "Write API key", + "description": "RudderStack Source Writekey", + "secret": False, + "required": True, + }, + { + "key": "identifier", + "type": "string", + "label": "Identifier", + "default": "{person.uuid}", + "secret": False, + "required": True, + }, + ], +) diff --git a/posthog/cdp/templates/rudderstack/test_template_rudderstack.py b/posthog/cdp/templates/rudderstack/test_template_rudderstack.py new file mode 100644 index 0000000000000..61469f8d3f79d --- /dev/null +++ b/posthog/cdp/templates/rudderstack/test_template_rudderstack.py @@ -0,0 +1,107 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from 
posthog.cdp.templates.rudderstack.template_rudderstack import template as template_rudderstack + + +class TestTemplateRudderstack(BaseHogFunctionTemplateTest): + template = template_rudderstack + + def _inputs(self, **kwargs): + inputs = { + "host": "https://hosted.rudderlabs.com", + "token": "asjdkfasdkjfaskfkjfhdsf", + "identifier": "a08ff8e1-a5ee-49cc-99e9-564e455c33f0", + } + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function( + inputs=self._inputs(), + globals={ + "event": { + "uuid": "96a04bdc-6021-4120-a3e3-f1988f59ba5f", + "timestamp": "2024-08-29T13:40:22.713Z", + "distinct_id": "85bcd2e4-d10d-4a99-9dc8-43789b7226a1", + "name": "$pageview", + "properties": {"$current_url": "https://example.com", "$browser": "Chrome"}, + }, + "person": {"uuid": "a08ff8e1-a5ee-49cc-99e9-564e455c33f0"}, + }, + ) + + res = self.get_mock_fetch_calls()[0] + res[1]["body"]["sentAt"]["dt"] = 1724946899.775266 + assert res == snapshot( + ( + "https://hosted.rudderlabs.com/v1/batch", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Basic YXNqZGtmYXNka2pmYXNrZmtqZmhkc2Y6", + }, + "body": { + "batch": [ + { + "context": { + "app": {"name": "PostHogPlugin"}, + "os": {"name": None}, + "browser": "Chrome", + "browser_version": None, + "page": { + "host": None, + "url": "https://example.com", + "path": None, + "referrer": None, + "initial_referrer": None, + "referring_domain": None, + "initial_referring_domain": None, + }, + "screen": {"height": None, "width": None}, + "library": {"name": None, "version": None}, + "ip": None, + "active_feature_flags": None, + "token": None, + }, + "channel": "s2s", + "messageId": "96a04bdc-6021-4120-a3e3-f1988f59ba5f", + "originalTimestamp": "2024-08-29T13:40:22.713Z", + "userId": "a08ff8e1-a5ee-49cc-99e9-564e455c33f0", + "anonymousId": None, + "type": "page", + "properties": { + "host": None, + "url": "https://example.com", + "path": None, + "referrer": None, + "initial_referrer": None, + "referring_domain": None, + "initial_referring_domain": None, + }, + "name": None, + } + ], + "sentAt": {"__hogDateTime__": True, "dt": 1724946899.775266, "zone": "UTC"}, + }, + }, + ) + ) + + def test_automatic_action_mapping(self): + for event_name, expected_action in [ + ("$identify", "identify"), + ("$set", "identify"), + ("$pageview", "page"), + ("$create_alias", "alias"), + ("$autocapture", "track"), + ("custom", "track"), + ]: + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"name": event_name, "properties": {"url": "https://example.com", "$browser": "Chrome"}}, + }, + ) + + assert self.get_mock_fetch_calls()[0][1]["body"]["batch"][0]["type"] == expected_action diff --git a/posthog/cdp/templates/sendgrid/template_sendgrid.py b/posthog/cdp/templates/sendgrid/template_sendgrid.py index cdd0aa625bcde..a3d111a88c4b4 100644 --- a/posthog/cdp/templates/sendgrid/template_sendgrid.py +++ b/posthog/cdp/templates/sendgrid/template_sendgrid.py @@ -1,4 +1,7 @@ -from posthog.cdp.templates.hog_function_template import HogFunctionTemplate +import dataclasses +from copy import deepcopy + +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate, HogFunctionTemplateMigrator # Based off of https://www.twilio.com/docs/sendgrid/api-reference/contacts/add-or-update-a-contact @@ -9,15 +12,13 @@ description="Update marketing contacts in Sendgrid", icon_url="/static/services/sendgrid.png", hog=""" -let email := inputs.email - -if (empty(email)) { +if (empty(inputs.email)) { 
print('`email` input is empty. Not updating contacts.') return } let contact := { - 'email': email, + 'email': inputs.email, } for (let key, value in inputs.properties) { @@ -26,15 +27,32 @@ } } +let headers := { + 'Authorization': f'Bearer {inputs.api_key}', + 'Content-Type': 'application/json' +} + +if (not empty(inputs.custom_fields)) { + let response := fetch('https://api.sendgrid.com/v3/marketing/field_definitions', { + 'method': 'GET', + 'headers': headers + }) + if (response.status != 200) { + throw Error(f'Could not fetch custom fields. Status: {response.status}') + } + contact['custom_fields'] := {} + for (let obj in response.body?.custom_fields ?? {}) { + let inputValue := inputs.custom_fields[obj.name] + if (not empty(inputValue)) { + contact['custom_fields'][obj.id] := inputValue + } + } +} + let res := fetch('https://api.sendgrid.com/v3/marketing/contacts', { 'method': 'PUT', - 'headers': { - 'Authorization': f'Bearer {inputs.api_key}', - 'Content-Type': 'application/json' - }, - 'body': { - 'contacts': [contact] - } + 'headers': headers, + 'body': { 'contacts': [contact] } }) if (res.status > 300) { @@ -61,11 +79,11 @@ { "key": "properties", "type": "dictionary", - "label": "Property mapping", - "description": "Map of reserved properties (https://www.twilio.com/docs/sendgrid/api-reference/contacts/add-or-update-a-contact)", + "label": "Reserved fields", + "description": "The following field names are allowed: address_line_1, address_line_2, alternate_emails, anonymous_id, city, country, email, external_id, facebook, first_name, last_name, phone_number_id, postal_code, state_province_region, unique_name, whatsapp.", "default": { - "last_name": "{person.properties.last_name}", "first_name": "{person.properties.first_name}", + "last_name": "{person.properties.last_name}", "city": "{person.properties.city}", "country": "{person.properties.country}", "postal_code": "{person.properties.postal_code}", @@ -73,7 +91,15 @@ "secret": False, "required": True, }, - # TODO: Add dynamic code for loading custom fields + { + "key": "custom_fields", + "type": "dictionary", + "label": "Custom fields", + "description": "Configure custom fields in SendGrid before using them here: https://mc.sendgrid.com/custom-fields", + "default": {}, + "secret": False, + "required": False, + }, ], filters={ "events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}], @@ -81,3 +107,55 @@ "filter_test_accounts": True, }, ) + + +class TemplateSendGridMigrator(HogFunctionTemplateMigrator): + plugin_url = "https://github.com/PostHog/sendgrid-plugin" + + @classmethod + def migrate(cls, obj): + hf = deepcopy(dataclasses.asdict(template)) + + sendgridApiKey = obj.config.get("sendgridApiKey", "") + customFields = obj.config.get("customFields", "") + sendgrid_fields = [ + "address_line_1", + "address_line_2", + "alternate_emails", + "anonymous_id", + "city", + "country", + "email", + "external_id", + "facebook", + "first_name", + "last_name", + "phone_number_id", + "postal_code", + "state_province_region", + "unique_name", + "whatsapp", + ] + + hf["filters"] = {} + hf["filters"]["events"] = [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}] + + hf["inputs"] = { + "api_key": {"value": sendgridApiKey}, + "email": {"value": "{person.properties.email}"}, + "properties": {"value": {}}, + "custom_fields": {"value": {}}, + } + if customFields: + for field in customFields.split(","): + if "=" in field: + posthog_prop, sendgrid_field = field.split("=") + else: + posthog_prop = 
sendgrid_field = field.strip() + posthog_prop = f"{{person.properties.{posthog_prop}}}" + if sendgrid_field in sendgrid_fields: + hf["inputs"]["properties"]["value"][sendgrid_field] = posthog_prop + else: + hf["inputs"]["custom_fields"]["value"][sendgrid_field] = posthog_prop + + return hf diff --git a/posthog/cdp/templates/sendgrid/test_template_sendgrid.py b/posthog/cdp/templates/sendgrid/test_template_sendgrid.py index 22bc8a39cebf6..eab55f1153b69 100644 --- a/posthog/cdp/templates/sendgrid/test_template_sendgrid.py +++ b/posthog/cdp/templates/sendgrid/test_template_sendgrid.py @@ -1,6 +1,8 @@ from inline_snapshot import snapshot from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest -from posthog.cdp.templates.sendgrid.template_sendgrid import template as template_sendgrid +from posthog.cdp.templates.sendgrid.template_sendgrid import template as template_sendgrid, TemplateSendGridMigrator +from posthog.models import PluginConfig +from posthog.test.base import BaseTest class TestTemplateSendgrid(BaseHogFunctionTemplateTest): @@ -47,3 +49,138 @@ def test_function_doesnt_include_empty_properties(self): assert self.get_mock_fetch_calls()[0][1]["body"]["contacts"] == snapshot( [{"email": "example@posthog.com", "last_name": "included"}] ) + + def test_function_adds_custom_fields(self): + self.mock_fetch_response = lambda *args: { # type: ignore + "status": 200, + "body": {"custom_fields": [{"id": "id7", "name": "custom_field"}]}, + } + + res = self.run_function( + inputs=self._inputs( + custom_fields={"custom_field": "custom_value"}, + ) + ) + assert res.result is None + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://api.sendgrid.com/v3/marketing/field_definitions", + { + "method": "GET", + "headers": {"Authorization": "Bearer API_KEY", "Content-Type": "application/json"}, + }, + ) + ) + + assert self.get_mock_fetch_calls()[1] == snapshot( + ( + "https://api.sendgrid.com/v3/marketing/contacts", + { + "method": "PUT", + "headers": {"Authorization": "Bearer API_KEY", "Content-Type": "application/json"}, + "body": { + "contacts": [ + { + "email": "example@posthog.com", + "last_name": "example", + "custom_fields": {"id7": "custom_value"}, + } + ] + }, + }, + ) + ) + + +class TestTemplateMigration(BaseTest): + def get_plugin_config(self, config: dict): + _config = { + "sendgridApiKey": "SENDGRID_API_KEY", + "customFields": "", + } + _config.update(config) + return PluginConfig(enabled=True, order=0, config=_config) + + def test_empty_fields(self): + obj = self.get_plugin_config({}) + + template = TemplateSendGridMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "api_key": {"value": "SENDGRID_API_KEY"}, + "email": {"value": "{person.properties.email}"}, + "custom_fields": {"value": {}}, + "properties": {"value": {}}, + } + ) + assert template["filters"] == snapshot( + {"events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}]} + ) + + def test_default_properties(self): + obj = self.get_plugin_config({"customFields": "last_name,first_name"}) + + template = TemplateSendGridMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "api_key": {"value": "SENDGRID_API_KEY"}, + "email": {"value": "{person.properties.email}"}, + "custom_fields": {"value": {}}, + "properties": { + "value": { + "last_name": "{person.properties.last_name}", + "first_name": "{person.properties.first_name}", + } + }, + } + ) + assert template["filters"] == snapshot( + {"events": [{"id": "$identify", "name": "$identify", "type": "events", 
"order": 0}]} + ) + + def test_custom_fields(self): + obj = self.get_plugin_config({"customFields": "last_name,first_name,misc_name,banana"}) + + template = TemplateSendGridMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "api_key": {"value": "SENDGRID_API_KEY"}, + "email": {"value": "{person.properties.email}"}, + "custom_fields": { + "value": {"misc_name": "{person.properties.misc_name}", "banana": "{person.properties.banana}"} + }, + "properties": { + "value": { + "last_name": "{person.properties.last_name}", + "first_name": "{person.properties.first_name}", + } + }, + } + ) + assert template["filters"] == snapshot( + {"events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}]} + ) + + def test_property_rename(self): + obj = self.get_plugin_config({"customFields": "$lastName=last_name,first_name,misc_name,$pineapple=banana"}) + + template = TemplateSendGridMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "api_key": {"value": "SENDGRID_API_KEY"}, + "email": {"value": "{person.properties.email}"}, + "custom_fields": { + "value": {"misc_name": "{person.properties.misc_name}", "banana": "{person.properties.$pineapple}"} + }, + "properties": { + "value": { + "last_name": "{person.properties.$lastName}", + "first_name": "{person.properties.first_name}", + } + }, + } + ) + assert template["filters"] == snapshot( + {"events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}]} + ) diff --git a/posthog/hogql/bytecode.py b/posthog/hogql/bytecode.py index b0f0e290017fb..523a03d37a4f8 100644 --- a/posthog/hogql/bytecode.py +++ b/posthog/hogql/bytecode.py @@ -391,6 +391,14 @@ def visit_block(self, node: ast.Block): def visit_expr_statement(self, node: ast.ExprStatement): if node.expr is None: return [] + if isinstance(node.expr, ast.CompareOperation) and node.expr.op == ast.CompareOperationOp.Eq: + self.context.warnings.append( + HogQLNotice( + start=node.start, + end=node.end, + message="You must use ':=' for assignment instead of '='.", + ) + ) response = self.visit(node.expr) response.append(Operation.POP) return response diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 557cbfd05c01e..5e64111632997 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -92,6 +92,8 @@ def get_breakdown_limit_for_context(limit_context: LimitContext) -> int: class HogQLQuerySettings(BaseModel): model_config = ConfigDict(extra="forbid") optimize_aggregation_in_order: Optional[bool] = None + date_time_output_format: Optional[str] = None + date_time_input_format: Optional[str] = None # Settings applied on top of all HogQL queries. 
diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 3f07f8bc763a5..5030f966929e4 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -748,15 +748,24 @@ def serialize_fields( ) ) elif isinstance(field, LazyJoin): - is_view = isinstance(field.resolve_table(context), SavedQuery) + resolved_table = field.resolve_table(context) + + if isinstance(resolved_table, SavedQuery): + type = DatabaseSerializedFieldType.VIEW + id = str(resolved_table.id) + else: + type = DatabaseSerializedFieldType.LAZY_TABLE + id = None + field_output.append( DatabaseSchemaField( name=field_key, hogql_value=hogql_value, - type=DatabaseSerializedFieldType.VIEW if is_view else DatabaseSerializedFieldType.LAZY_TABLE, + type=type, schema_valid=schema_valid, table=field.resolve_table(context).to_printed_hogql(), fields=list(field.resolve_table(context).fields.keys()), + id=id, ) ) elif isinstance(field, VirtualTable): diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py index fe90976bec3cd..f7e0211e419e2 100644 --- a/posthog/hogql/database/models.py +++ b/posthog/hogql/database/models.py @@ -219,6 +219,7 @@ class SavedQuery(Table): A table that returns a subquery, e.g. my_saved_query -> (SELECT * FROM some_saved_table). The team_id guard is NOT added for the overall subquery """ + id: str query: str name: str diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index 6e4834dda1031..8345a36c4208c 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -23,6 +23,7 @@ "chain": null, "fields": null, "hogql_value": "uuid", + "id": null, "name": "uuid", "schema_valid": true, "table": null, @@ -32,6 +33,7 @@ "chain": null, "fields": null, "hogql_value": "event", + "id": null, "name": "event", "schema_valid": true, "table": null, @@ -41,6 +43,7 @@ "chain": null, "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -50,6 +53,7 @@ "chain": null, "fields": null, "hogql_value": "timestamp", + "id": null, "name": "timestamp", "schema_valid": true, "table": null, @@ -59,6 +63,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -68,6 +73,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain", + "id": null, "name": "elements_chain", "schema_valid": true, "table": null, @@ -77,6 +83,7 @@ "chain": null, "fields": null, "hogql_value": "created_at", + "id": null, "name": "created_at", "schema_valid": true, "table": null, @@ -86,6 +93,7 @@ "chain": null, "fields": null, "hogql_value": "`$session_id`", + "id": null, "name": "$session_id", "schema_valid": true, "table": null, @@ -95,6 +103,7 @@ "chain": null, "fields": null, "hogql_value": "`$window_id`", + "id": null, "name": "$window_id", "schema_valid": true, "table": null, @@ -109,6 +118,7 @@ "person" ], "hogql_value": "pdi", + "id": null, "name": "pdi", "schema_valid": true, "table": "person_distinct_ids", @@ -122,6 +132,7 @@ "properties" ], "hogql_value": "poe", + "id": null, "name": "poe", "schema_valid": true, "table": "events", @@ -135,6 +146,7 @@ "properties" ], "hogql_value": "goe_0", + "id": null, "name": "goe_0", "schema_valid": true, "table": "events", @@ -148,6 +160,7 @@ "properties" ], "hogql_value": "goe_1", + "id": null, "name": "goe_1", 
"schema_valid": true, "table": "events", @@ -161,6 +174,7 @@ "properties" ], "hogql_value": "goe_2", + "id": null, "name": "goe_2", "schema_valid": true, "table": "events", @@ -174,6 +188,7 @@ "properties" ], "hogql_value": "goe_3", + "id": null, "name": "goe_3", "schema_valid": true, "table": "events", @@ -187,6 +202,7 @@ "properties" ], "hogql_value": "goe_4", + "id": null, "name": "goe_4", "schema_valid": true, "table": "events", @@ -199,6 +215,7 @@ ], "fields": null, "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": null, @@ -211,6 +228,7 @@ ], "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -220,6 +238,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_0`", + "id": null, "name": "$group_0", "schema_valid": true, "table": null, @@ -236,6 +255,7 @@ "properties" ], "hogql_value": "group_0", + "id": null, "name": "group_0", "schema_valid": true, "table": "groups", @@ -245,6 +265,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_1`", + "id": null, "name": "$group_1", "schema_valid": true, "table": null, @@ -261,6 +282,7 @@ "properties" ], "hogql_value": "group_1", + "id": null, "name": "group_1", "schema_valid": true, "table": "groups", @@ -270,6 +292,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_2`", + "id": null, "name": "$group_2", "schema_valid": true, "table": null, @@ -286,6 +309,7 @@ "properties" ], "hogql_value": "group_2", + "id": null, "name": "group_2", "schema_valid": true, "table": "groups", @@ -295,6 +319,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_3`", + "id": null, "name": "$group_3", "schema_valid": true, "table": null, @@ -311,6 +336,7 @@ "properties" ], "hogql_value": "group_3", + "id": null, "name": "group_3", "schema_valid": true, "table": "groups", @@ -320,6 +346,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_4`", + "id": null, "name": "$group_4", "schema_valid": true, "table": null, @@ -336,6 +363,7 @@ "properties" ], "hogql_value": "group_4", + "id": null, "name": "group_4", "schema_valid": true, "table": "groups", @@ -376,6 +404,7 @@ "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", + "id": null, "name": "session", "schema_valid": true, "table": "sessions", @@ -385,6 +414,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_href", + "id": null, "name": "elements_chain_href", "schema_valid": true, "table": null, @@ -394,6 +424,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_texts", + "id": null, "name": "elements_chain_texts", "schema_valid": true, "table": null, @@ -403,6 +434,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_ids", + "id": null, "name": "elements_chain_ids", "schema_valid": true, "table": null, @@ -412,6 +444,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_elements", + "id": null, "name": "elements_chain_elements", "schema_valid": true, "table": null, @@ -428,6 +461,7 @@ "chain": null, "fields": null, "hogql_value": "index", + "id": null, "name": "index", "schema_valid": true, "table": null, @@ -437,6 +471,7 @@ "chain": null, "fields": null, "hogql_value": "key", + "id": null, "name": "key", "schema_valid": true, "table": null, @@ -446,6 +481,7 @@ "chain": null, "fields": null, "hogql_value": "created_at", + "id": null, "name": "created_at", "schema_valid": true, "table": null, @@ -455,6 +491,7 @@ "chain": null, "fields": null, "hogql_value": "updated_at", + "id": null, "name": "updated_at", 
"schema_valid": true, "table": null, @@ -464,6 +501,7 @@ "chain": null, "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -480,6 +518,7 @@ "chain": null, "fields": null, "hogql_value": "id", + "id": null, "name": "id", "schema_valid": true, "table": null, @@ -489,6 +528,7 @@ "chain": null, "fields": null, "hogql_value": "created_at", + "id": null, "name": "created_at", "schema_valid": true, "table": null, @@ -498,6 +538,7 @@ "chain": null, "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -507,6 +548,7 @@ "chain": null, "fields": null, "hogql_value": "is_identified", + "id": null, "name": "is_identified", "schema_valid": true, "table": null, @@ -520,6 +562,7 @@ "person_id" ], "hogql_value": "pdi", + "id": null, "name": "pdi", "schema_valid": true, "table": "person_distinct_ids", @@ -529,6 +572,7 @@ "chain": null, "fields": null, "hogql_value": "`$virt_initial_referring_domain_type`", + "id": null, "name": "$virt_initial_referring_domain_type", "schema_valid": true, "table": null, @@ -538,6 +582,7 @@ "chain": null, "fields": null, "hogql_value": "`$virt_initial_channel_type`", + "id": null, "name": "$virt_initial_channel_type", "schema_valid": true, "table": null, @@ -554,6 +599,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -563,6 +609,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -581,6 +628,7 @@ "$virt_initial_channel_type" ], "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": "persons", @@ -597,6 +645,7 @@ "chain": null, "fields": null, "hogql_value": "session_id", + "id": null, "name": "session_id", "schema_valid": true, "table": null, @@ -606,6 +655,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -615,6 +665,7 @@ "chain": null, "fields": null, "hogql_value": "first_url", + "id": null, "name": "first_url", "schema_valid": true, "table": null, @@ -624,6 +675,7 @@ "chain": null, "fields": null, "hogql_value": "click_count", + "id": null, "name": "click_count", "schema_valid": true, "table": null, @@ -633,6 +685,7 @@ "chain": null, "fields": null, "hogql_value": "keypress_count", + "id": null, "name": "keypress_count", "schema_valid": true, "table": null, @@ -642,6 +695,7 @@ "chain": null, "fields": null, "hogql_value": "mouse_activity_count", + "id": null, "name": "mouse_activity_count", "schema_valid": true, "table": null, @@ -651,6 +705,7 @@ "chain": null, "fields": null, "hogql_value": "active_milliseconds", + "id": null, "name": "active_milliseconds", "schema_valid": true, "table": null, @@ -660,6 +715,7 @@ "chain": null, "fields": null, "hogql_value": "console_log_count", + "id": null, "name": "console_log_count", "schema_valid": true, "table": null, @@ -669,6 +725,7 @@ "chain": null, "fields": null, "hogql_value": "console_warn_count", + "id": null, "name": "console_warn_count", "schema_valid": true, "table": null, @@ -678,6 +735,7 @@ "chain": null, "fields": null, "hogql_value": "console_error_count", + "id": null, "name": "console_error_count", "schema_valid": true, "table": null, @@ -687,6 +745,7 @@ "chain": null, "fields": null, "hogql_value": "size", + "id": null, "name": "size", "schema_valid": true, "table": null, @@ 
-696,6 +755,7 @@ "chain": null, "fields": null, "hogql_value": "event_count", + "id": null, "name": "event_count", "schema_valid": true, "table": null, @@ -705,6 +765,7 @@ "chain": null, "fields": null, "hogql_value": "message_count", + "id": null, "name": "message_count", "schema_valid": true, "table": null, @@ -714,6 +775,7 @@ "chain": null, "fields": null, "hogql_value": "snapshot_source", + "id": null, "name": "snapshot_source", "schema_valid": true, "table": null, @@ -758,6 +820,7 @@ "elements_chain_elements" ], "hogql_value": "events", + "id": null, "name": "events", "schema_valid": true, "table": "events", @@ -770,6 +833,7 @@ ], "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -784,6 +848,7 @@ "person" ], "hogql_value": "pdi", + "id": null, "name": "pdi", "schema_valid": true, "table": "person_distinct_ids", @@ -801,6 +866,7 @@ "level" ], "hogql_value": "console_logs", + "id": null, "name": "console_logs", "schema_valid": true, "table": "console_logs_log_entries", @@ -813,6 +879,7 @@ ], "fields": null, "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": null, @@ -825,6 +892,7 @@ ], "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -865,6 +933,7 @@ "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", + "id": null, "name": "session", "schema_valid": true, "table": "sessions", @@ -874,6 +943,7 @@ "chain": null, "fields": null, "hogql_value": "start_time", + "id": null, "name": "start_time", "schema_valid": true, "table": null, @@ -883,6 +953,7 @@ "chain": null, "fields": null, "hogql_value": "end_time", + "id": null, "name": "end_time", "schema_valid": true, "table": null, @@ -899,6 +970,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -908,6 +980,7 @@ "chain": null, "fields": null, "hogql_value": "cohort_id", + "id": null, "name": "cohort_id", "schema_valid": true, "table": null, @@ -926,6 +999,7 @@ "$virt_initial_channel_type" ], "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": "persons", @@ -942,6 +1016,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -951,6 +1026,7 @@ "chain": null, "fields": null, "hogql_value": "cohort_id", + "id": null, "name": "cohort_id", "schema_valid": true, "table": null, @@ -969,6 +1045,7 @@ "$virt_initial_channel_type" ], "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": "persons", @@ -985,6 +1062,7 @@ "chain": null, "fields": null, "hogql_value": "log_source", + "id": null, "name": "log_source", "schema_valid": true, "table": null, @@ -994,6 +1072,7 @@ "chain": null, "fields": null, "hogql_value": "log_source_id", + "id": null, "name": "log_source_id", "schema_valid": true, "table": null, @@ -1003,6 +1082,7 @@ "chain": null, "fields": null, "hogql_value": "instance_id", + "id": null, "name": "instance_id", "schema_valid": true, "table": null, @@ -1012,6 +1092,7 @@ "chain": null, "fields": null, "hogql_value": "timestamp", + "id": null, "name": "timestamp", "schema_valid": true, "table": null, @@ -1021,6 +1102,7 @@ "chain": null, "fields": null, "hogql_value": "message", + "id": null, "name": "message", "schema_valid": true, "table": null, @@ -1030,6 +1112,7 @@ "chain": null, "fields": null, "hogql_value": "level", 
+ "id": null, "name": "level", "schema_valid": true, "table": null, @@ -1046,6 +1129,7 @@ "chain": null, "fields": null, "hogql_value": "id", + "id": null, "name": "id", "schema_valid": true, "table": null, @@ -1055,6 +1139,7 @@ "chain": null, "fields": null, "hogql_value": "session_id", + "id": null, "name": "session_id", "schema_valid": true, "table": null, @@ -1064,6 +1149,7 @@ "chain": null, "fields": null, "hogql_value": "session_id_v7", + "id": null, "name": "session_id_v7", "schema_valid": true, "table": null, @@ -1073,6 +1159,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -1082,6 +1169,7 @@ "chain": null, "fields": null, "hogql_value": "`$start_timestamp`", + "id": null, "name": "$start_timestamp", "schema_valid": true, "table": null, @@ -1091,6 +1179,7 @@ "chain": null, "fields": null, "hogql_value": "`$end_timestamp`", + "id": null, "name": "$end_timestamp", "schema_valid": true, "table": null, @@ -1100,6 +1189,7 @@ "chain": null, "fields": null, "hogql_value": "`$urls`", + "id": null, "name": "$urls", "schema_valid": true, "table": null, @@ -1109,6 +1199,7 @@ "chain": null, "fields": null, "hogql_value": "`$num_uniq_urls`", + "id": null, "name": "$num_uniq_urls", "schema_valid": true, "table": null, @@ -1118,6 +1209,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_current_url`", + "id": null, "name": "$entry_current_url", "schema_valid": true, "table": null, @@ -1127,6 +1219,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_pathname`", + "id": null, "name": "$entry_pathname", "schema_valid": true, "table": null, @@ -1136,6 +1229,7 @@ "chain": null, "fields": null, "hogql_value": "`$end_current_url`", + "id": null, "name": "$end_current_url", "schema_valid": true, "table": null, @@ -1145,6 +1239,7 @@ "chain": null, "fields": null, "hogql_value": "`$end_pathname`", + "id": null, "name": "$end_pathname", "schema_valid": true, "table": null, @@ -1154,6 +1249,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_source`", + "id": null, "name": "$entry_utm_source", "schema_valid": true, "table": null, @@ -1163,6 +1259,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_campaign`", + "id": null, "name": "$entry_utm_campaign", "schema_valid": true, "table": null, @@ -1172,6 +1269,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_medium`", + "id": null, "name": "$entry_utm_medium", "schema_valid": true, "table": null, @@ -1181,6 +1279,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_term`", + "id": null, "name": "$entry_utm_term", "schema_valid": true, "table": null, @@ -1190,6 +1289,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_content`", + "id": null, "name": "$entry_utm_content", "schema_valid": true, "table": null, @@ -1199,6 +1299,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_referring_domain`", + "id": null, "name": "$entry_referring_domain", "schema_valid": true, "table": null, @@ -1208,6 +1309,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_gclid`", + "id": null, "name": "$entry_gclid", "schema_valid": true, "table": null, @@ -1217,6 +1319,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_gad_source`", + "id": null, "name": "$entry_gad_source", "schema_valid": true, "table": null, @@ -1226,6 +1329,7 @@ "chain": null, "fields": null, "hogql_value": "`$pageview_count`", + "id": null, "name": "$pageview_count", "schema_valid": true, "table": null, @@ -1235,6 
+1339,7 @@ "chain": null, "fields": null, "hogql_value": "`$autocapture_count`", + "id": null, "name": "$autocapture_count", "schema_valid": true, "table": null, @@ -1244,6 +1349,7 @@ "chain": null, "fields": null, "hogql_value": "`$screen_count`", + "id": null, "name": "$screen_count", "schema_valid": true, "table": null, @@ -1253,6 +1359,7 @@ "chain": null, "fields": null, "hogql_value": "`$channel_type`", + "id": null, "name": "$channel_type", "schema_valid": true, "table": null, @@ -1262,6 +1369,7 @@ "chain": null, "fields": null, "hogql_value": "`$session_duration`", + "id": null, "name": "$session_duration", "schema_valid": true, "table": null, @@ -1271,6 +1379,7 @@ "chain": null, "fields": null, "hogql_value": "duration", + "id": null, "name": "duration", "schema_valid": true, "table": null, @@ -1280,6 +1389,7 @@ "chain": null, "fields": null, "hogql_value": "`$is_bounce`", + "id": null, "name": "$is_bounce", "schema_valid": true, "table": null, @@ -1289,6 +1399,7 @@ "chain": null, "fields": null, "hogql_value": "`$last_external_click_url`", + "id": null, "name": "$last_external_click_url", "schema_valid": true, "table": null, @@ -1305,6 +1416,7 @@ "chain": null, "fields": null, "hogql_value": "session_id", + "id": null, "name": "session_id", "schema_valid": true, "table": null, @@ -1314,6 +1426,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -1323,6 +1436,7 @@ "chain": null, "fields": null, "hogql_value": "x", + "id": null, "name": "x", "schema_valid": true, "table": null, @@ -1332,6 +1446,7 @@ "chain": null, "fields": null, "hogql_value": "y", + "id": null, "name": "y", "schema_valid": true, "table": null, @@ -1341,6 +1456,7 @@ "chain": null, "fields": null, "hogql_value": "scale_factor", + "id": null, "name": "scale_factor", "schema_valid": true, "table": null, @@ -1350,6 +1466,7 @@ "chain": null, "fields": null, "hogql_value": "viewport_width", + "id": null, "name": "viewport_width", "schema_valid": true, "table": null, @@ -1359,6 +1476,7 @@ "chain": null, "fields": null, "hogql_value": "viewport_height", + "id": null, "name": "viewport_height", "schema_valid": true, "table": null, @@ -1368,6 +1486,7 @@ "chain": null, "fields": null, "hogql_value": "pointer_target_fixed", + "id": null, "name": "pointer_target_fixed", "schema_valid": true, "table": null, @@ -1377,6 +1496,7 @@ "chain": null, "fields": null, "hogql_value": "current_url", + "id": null, "name": "current_url", "schema_valid": true, "table": null, @@ -1386,6 +1506,7 @@ "chain": null, "fields": null, "hogql_value": "timestamp", + "id": null, "name": "timestamp", "schema_valid": true, "table": null, @@ -1395,6 +1516,7 @@ "chain": null, "fields": null, "hogql_value": "type", + "id": null, "name": "type", "schema_valid": true, "table": null, @@ -1417,6 +1539,7 @@ "chain": null, "fields": null, "hogql_value": "uuid", + "id": null, "name": "uuid", "schema_valid": true, "table": null, @@ -1426,6 +1549,7 @@ "chain": null, "fields": null, "hogql_value": "event", + "id": null, "name": "event", "schema_valid": true, "table": null, @@ -1435,6 +1559,7 @@ "chain": null, "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -1444,6 +1569,7 @@ "chain": null, "fields": null, "hogql_value": "timestamp", + "id": null, "name": "timestamp", "schema_valid": true, "table": null, @@ -1453,6 +1579,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": 
"distinct_id", "schema_valid": true, "table": null, @@ -1462,6 +1589,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain", + "id": null, "name": "elements_chain", "schema_valid": true, "table": null, @@ -1471,6 +1599,7 @@ "chain": null, "fields": null, "hogql_value": "created_at", + "id": null, "name": "created_at", "schema_valid": true, "table": null, @@ -1480,6 +1609,7 @@ "chain": null, "fields": null, "hogql_value": "`$session_id`", + "id": null, "name": "$session_id", "schema_valid": true, "table": null, @@ -1489,6 +1619,7 @@ "chain": null, "fields": null, "hogql_value": "`$window_id`", + "id": null, "name": "$window_id", "schema_valid": true, "table": null, @@ -1503,6 +1634,7 @@ "person" ], "hogql_value": "pdi", + "id": null, "name": "pdi", "schema_valid": true, "table": "person_distinct_ids", @@ -1516,6 +1648,7 @@ "properties" ], "hogql_value": "poe", + "id": null, "name": "poe", "schema_valid": true, "table": "events", @@ -1529,6 +1662,7 @@ "properties" ], "hogql_value": "goe_0", + "id": null, "name": "goe_0", "schema_valid": true, "table": "events", @@ -1542,6 +1676,7 @@ "properties" ], "hogql_value": "goe_1", + "id": null, "name": "goe_1", "schema_valid": true, "table": "events", @@ -1555,6 +1690,7 @@ "properties" ], "hogql_value": "goe_2", + "id": null, "name": "goe_2", "schema_valid": true, "table": "events", @@ -1568,6 +1704,7 @@ "properties" ], "hogql_value": "goe_3", + "id": null, "name": "goe_3", "schema_valid": true, "table": "events", @@ -1581,6 +1718,7 @@ "properties" ], "hogql_value": "goe_4", + "id": null, "name": "goe_4", "schema_valid": true, "table": "events", @@ -1592,6 +1730,7 @@ ], "fields": null, "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": null, @@ -1601,6 +1740,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -1610,6 +1750,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_0`", + "id": null, "name": "$group_0", "schema_valid": true, "table": null, @@ -1626,6 +1767,7 @@ "properties" ], "hogql_value": "group_0", + "id": null, "name": "group_0", "schema_valid": true, "table": "groups", @@ -1635,6 +1777,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_1`", + "id": null, "name": "$group_1", "schema_valid": true, "table": null, @@ -1651,6 +1794,7 @@ "properties" ], "hogql_value": "group_1", + "id": null, "name": "group_1", "schema_valid": true, "table": "groups", @@ -1660,6 +1804,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_2`", + "id": null, "name": "$group_2", "schema_valid": true, "table": null, @@ -1676,6 +1821,7 @@ "properties" ], "hogql_value": "group_2", + "id": null, "name": "group_2", "schema_valid": true, "table": "groups", @@ -1685,6 +1831,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_3`", + "id": null, "name": "$group_3", "schema_valid": true, "table": null, @@ -1701,6 +1848,7 @@ "properties" ], "hogql_value": "group_3", + "id": null, "name": "group_3", "schema_valid": true, "table": "groups", @@ -1710,6 +1858,7 @@ "chain": null, "fields": null, "hogql_value": "`$group_4`", + "id": null, "name": "$group_4", "schema_valid": true, "table": null, @@ -1726,6 +1875,7 @@ "properties" ], "hogql_value": "group_4", + "id": null, "name": "group_4", "schema_valid": true, "table": "groups", @@ -1766,6 +1916,7 @@ "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", + "id": null, "name": "session", "schema_valid": true, "table": "sessions", @@ -1775,6 +1926,7 
@@ "chain": null, "fields": null, "hogql_value": "elements_chain_href", + "id": null, "name": "elements_chain_href", "schema_valid": true, "table": null, @@ -1784,6 +1936,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_texts", + "id": null, "name": "elements_chain_texts", "schema_valid": true, "table": null, @@ -1793,6 +1946,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_ids", + "id": null, "name": "elements_chain_ids", "schema_valid": true, "table": null, @@ -1802,6 +1956,7 @@ "chain": null, "fields": null, "hogql_value": "elements_chain_elements", + "id": null, "name": "elements_chain_elements", "schema_valid": true, "table": null, @@ -1818,6 +1973,7 @@ "chain": null, "fields": null, "hogql_value": "index", + "id": null, "name": "index", "schema_valid": true, "table": null, @@ -1827,6 +1983,7 @@ "chain": null, "fields": null, "hogql_value": "key", + "id": null, "name": "key", "schema_valid": true, "table": null, @@ -1836,6 +1993,7 @@ "chain": null, "fields": null, "hogql_value": "created_at", + "id": null, "name": "created_at", "schema_valid": true, "table": null, @@ -1845,6 +2003,7 @@ "chain": null, "fields": null, "hogql_value": "updated_at", + "id": null, "name": "updated_at", "schema_valid": true, "table": null, @@ -1854,6 +2013,7 @@ "chain": null, "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -1870,6 +2030,7 @@ "chain": null, "fields": null, "hogql_value": "id", + "id": null, "name": "id", "schema_valid": true, "table": null, @@ -1879,6 +2040,7 @@ "chain": null, "fields": null, "hogql_value": "created_at", + "id": null, "name": "created_at", "schema_valid": true, "table": null, @@ -1888,6 +2050,7 @@ "chain": null, "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -1897,6 +2060,7 @@ "chain": null, "fields": null, "hogql_value": "is_identified", + "id": null, "name": "is_identified", "schema_valid": true, "table": null, @@ -1910,6 +2074,7 @@ "person_id" ], "hogql_value": "pdi", + "id": null, "name": "pdi", "schema_valid": true, "table": "person_distinct_ids", @@ -1919,6 +2084,7 @@ "chain": null, "fields": null, "hogql_value": "`$virt_initial_referring_domain_type`", + "id": null, "name": "$virt_initial_referring_domain_type", "schema_valid": true, "table": null, @@ -1928,6 +2094,7 @@ "chain": null, "fields": null, "hogql_value": "`$virt_initial_channel_type`", + "id": null, "name": "$virt_initial_channel_type", "schema_valid": true, "table": null, @@ -1944,6 +2111,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -1953,6 +2121,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -1971,6 +2140,7 @@ "$virt_initial_channel_type" ], "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": "persons", @@ -1987,6 +2157,7 @@ "chain": null, "fields": null, "hogql_value": "session_id", + "id": null, "name": "session_id", "schema_valid": true, "table": null, @@ -1996,6 +2167,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -2005,6 +2177,7 @@ "chain": null, "fields": null, "hogql_value": "first_url", + "id": null, "name": "first_url", "schema_valid": true, "table": null, @@ -2014,6 +2187,7 @@ "chain": null, "fields": 
null, "hogql_value": "click_count", + "id": null, "name": "click_count", "schema_valid": true, "table": null, @@ -2023,6 +2197,7 @@ "chain": null, "fields": null, "hogql_value": "keypress_count", + "id": null, "name": "keypress_count", "schema_valid": true, "table": null, @@ -2032,6 +2207,7 @@ "chain": null, "fields": null, "hogql_value": "mouse_activity_count", + "id": null, "name": "mouse_activity_count", "schema_valid": true, "table": null, @@ -2041,6 +2217,7 @@ "chain": null, "fields": null, "hogql_value": "active_milliseconds", + "id": null, "name": "active_milliseconds", "schema_valid": true, "table": null, @@ -2050,6 +2227,7 @@ "chain": null, "fields": null, "hogql_value": "console_log_count", + "id": null, "name": "console_log_count", "schema_valid": true, "table": null, @@ -2059,6 +2237,7 @@ "chain": null, "fields": null, "hogql_value": "console_warn_count", + "id": null, "name": "console_warn_count", "schema_valid": true, "table": null, @@ -2068,6 +2247,7 @@ "chain": null, "fields": null, "hogql_value": "console_error_count", + "id": null, "name": "console_error_count", "schema_valid": true, "table": null, @@ -2077,6 +2257,7 @@ "chain": null, "fields": null, "hogql_value": "size", + "id": null, "name": "size", "schema_valid": true, "table": null, @@ -2086,6 +2267,7 @@ "chain": null, "fields": null, "hogql_value": "event_count", + "id": null, "name": "event_count", "schema_valid": true, "table": null, @@ -2095,6 +2277,7 @@ "chain": null, "fields": null, "hogql_value": "message_count", + "id": null, "name": "message_count", "schema_valid": true, "table": null, @@ -2104,6 +2287,7 @@ "chain": null, "fields": null, "hogql_value": "snapshot_source", + "id": null, "name": "snapshot_source", "schema_valid": true, "table": null, @@ -2148,6 +2332,7 @@ "elements_chain_elements" ], "hogql_value": "events", + "id": null, "name": "events", "schema_valid": true, "table": "events", @@ -2160,6 +2345,7 @@ ], "fields": null, "hogql_value": "properties", + "id": null, "name": "properties", "schema_valid": true, "table": null, @@ -2174,6 +2360,7 @@ "person" ], "hogql_value": "pdi", + "id": null, "name": "pdi", "schema_valid": true, "table": "person_distinct_ids", @@ -2191,6 +2378,7 @@ "level" ], "hogql_value": "console_logs", + "id": null, "name": "console_logs", "schema_valid": true, "table": "console_logs_log_entries", @@ -2203,6 +2391,7 @@ ], "fields": null, "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": null, @@ -2215,6 +2404,7 @@ ], "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -2255,6 +2445,7 @@ "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", + "id": null, "name": "session", "schema_valid": true, "table": "sessions", @@ -2264,6 +2455,7 @@ "chain": null, "fields": null, "hogql_value": "start_time", + "id": null, "name": "start_time", "schema_valid": true, "table": null, @@ -2273,6 +2465,7 @@ "chain": null, "fields": null, "hogql_value": "end_time", + "id": null, "name": "end_time", "schema_valid": true, "table": null, @@ -2289,6 +2482,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -2298,6 +2492,7 @@ "chain": null, "fields": null, "hogql_value": "cohort_id", + "id": null, "name": "cohort_id", "schema_valid": true, "table": null, @@ -2316,6 +2511,7 @@ "$virt_initial_channel_type" ], "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": 
"persons", @@ -2332,6 +2528,7 @@ "chain": null, "fields": null, "hogql_value": "person_id", + "id": null, "name": "person_id", "schema_valid": true, "table": null, @@ -2341,6 +2538,7 @@ "chain": null, "fields": null, "hogql_value": "cohort_id", + "id": null, "name": "cohort_id", "schema_valid": true, "table": null, @@ -2359,6 +2557,7 @@ "$virt_initial_channel_type" ], "hogql_value": "person", + "id": null, "name": "person", "schema_valid": true, "table": "persons", @@ -2375,6 +2574,7 @@ "chain": null, "fields": null, "hogql_value": "log_source", + "id": null, "name": "log_source", "schema_valid": true, "table": null, @@ -2384,6 +2584,7 @@ "chain": null, "fields": null, "hogql_value": "log_source_id", + "id": null, "name": "log_source_id", "schema_valid": true, "table": null, @@ -2393,6 +2594,7 @@ "chain": null, "fields": null, "hogql_value": "instance_id", + "id": null, "name": "instance_id", "schema_valid": true, "table": null, @@ -2402,6 +2604,7 @@ "chain": null, "fields": null, "hogql_value": "timestamp", + "id": null, "name": "timestamp", "schema_valid": true, "table": null, @@ -2411,6 +2614,7 @@ "chain": null, "fields": null, "hogql_value": "message", + "id": null, "name": "message", "schema_valid": true, "table": null, @@ -2420,6 +2624,7 @@ "chain": null, "fields": null, "hogql_value": "level", + "id": null, "name": "level", "schema_valid": true, "table": null, @@ -2436,6 +2641,7 @@ "chain": null, "fields": null, "hogql_value": "id", + "id": null, "name": "id", "schema_valid": true, "table": null, @@ -2445,6 +2651,7 @@ "chain": null, "fields": null, "hogql_value": "session_id", + "id": null, "name": "session_id", "schema_valid": true, "table": null, @@ -2454,6 +2661,7 @@ "chain": null, "fields": null, "hogql_value": "session_id_v7", + "id": null, "name": "session_id_v7", "schema_valid": true, "table": null, @@ -2463,6 +2671,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -2472,6 +2681,7 @@ "chain": null, "fields": null, "hogql_value": "`$start_timestamp`", + "id": null, "name": "$start_timestamp", "schema_valid": true, "table": null, @@ -2481,6 +2691,7 @@ "chain": null, "fields": null, "hogql_value": "`$end_timestamp`", + "id": null, "name": "$end_timestamp", "schema_valid": true, "table": null, @@ -2490,6 +2701,7 @@ "chain": null, "fields": null, "hogql_value": "`$urls`", + "id": null, "name": "$urls", "schema_valid": true, "table": null, @@ -2499,6 +2711,7 @@ "chain": null, "fields": null, "hogql_value": "`$num_uniq_urls`", + "id": null, "name": "$num_uniq_urls", "schema_valid": true, "table": null, @@ -2508,6 +2721,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_current_url`", + "id": null, "name": "$entry_current_url", "schema_valid": true, "table": null, @@ -2517,6 +2731,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_pathname`", + "id": null, "name": "$entry_pathname", "schema_valid": true, "table": null, @@ -2526,6 +2741,7 @@ "chain": null, "fields": null, "hogql_value": "`$end_current_url`", + "id": null, "name": "$end_current_url", "schema_valid": true, "table": null, @@ -2535,6 +2751,7 @@ "chain": null, "fields": null, "hogql_value": "`$end_pathname`", + "id": null, "name": "$end_pathname", "schema_valid": true, "table": null, @@ -2544,6 +2761,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_source`", + "id": null, "name": "$entry_utm_source", "schema_valid": true, "table": null, @@ -2553,6 +2771,7 @@ "chain": null, "fields": null, 
"hogql_value": "`$entry_utm_campaign`", + "id": null, "name": "$entry_utm_campaign", "schema_valid": true, "table": null, @@ -2562,6 +2781,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_medium`", + "id": null, "name": "$entry_utm_medium", "schema_valid": true, "table": null, @@ -2571,6 +2791,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_term`", + "id": null, "name": "$entry_utm_term", "schema_valid": true, "table": null, @@ -2580,6 +2801,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_utm_content`", + "id": null, "name": "$entry_utm_content", "schema_valid": true, "table": null, @@ -2589,6 +2811,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_referring_domain`", + "id": null, "name": "$entry_referring_domain", "schema_valid": true, "table": null, @@ -2598,6 +2821,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_gclid`", + "id": null, "name": "$entry_gclid", "schema_valid": true, "table": null, @@ -2607,6 +2831,7 @@ "chain": null, "fields": null, "hogql_value": "`$entry_gad_source`", + "id": null, "name": "$entry_gad_source", "schema_valid": true, "table": null, @@ -2616,6 +2841,7 @@ "chain": null, "fields": null, "hogql_value": "`$pageview_count`", + "id": null, "name": "$pageview_count", "schema_valid": true, "table": null, @@ -2625,6 +2851,7 @@ "chain": null, "fields": null, "hogql_value": "`$autocapture_count`", + "id": null, "name": "$autocapture_count", "schema_valid": true, "table": null, @@ -2634,6 +2861,7 @@ "chain": null, "fields": null, "hogql_value": "`$screen_count`", + "id": null, "name": "$screen_count", "schema_valid": true, "table": null, @@ -2643,6 +2871,7 @@ "chain": null, "fields": null, "hogql_value": "`$channel_type`", + "id": null, "name": "$channel_type", "schema_valid": true, "table": null, @@ -2652,6 +2881,7 @@ "chain": null, "fields": null, "hogql_value": "`$session_duration`", + "id": null, "name": "$session_duration", "schema_valid": true, "table": null, @@ -2661,6 +2891,7 @@ "chain": null, "fields": null, "hogql_value": "duration", + "id": null, "name": "duration", "schema_valid": true, "table": null, @@ -2670,6 +2901,7 @@ "chain": null, "fields": null, "hogql_value": "`$is_bounce`", + "id": null, "name": "$is_bounce", "schema_valid": true, "table": null, @@ -2679,6 +2911,7 @@ "chain": null, "fields": null, "hogql_value": "`$last_external_click_url`", + "id": null, "name": "$last_external_click_url", "schema_valid": true, "table": null, @@ -2695,6 +2928,7 @@ "chain": null, "fields": null, "hogql_value": "session_id", + "id": null, "name": "session_id", "schema_valid": true, "table": null, @@ -2704,6 +2938,7 @@ "chain": null, "fields": null, "hogql_value": "distinct_id", + "id": null, "name": "distinct_id", "schema_valid": true, "table": null, @@ -2713,6 +2948,7 @@ "chain": null, "fields": null, "hogql_value": "x", + "id": null, "name": "x", "schema_valid": true, "table": null, @@ -2722,6 +2958,7 @@ "chain": null, "fields": null, "hogql_value": "y", + "id": null, "name": "y", "schema_valid": true, "table": null, @@ -2731,6 +2968,7 @@ "chain": null, "fields": null, "hogql_value": "scale_factor", + "id": null, "name": "scale_factor", "schema_valid": true, "table": null, @@ -2740,6 +2978,7 @@ "chain": null, "fields": null, "hogql_value": "viewport_width", + "id": null, "name": "viewport_width", "schema_valid": true, "table": null, @@ -2749,6 +2988,7 @@ "chain": null, "fields": null, "hogql_value": "viewport_height", + "id": null, "name": "viewport_height", "schema_valid": true, "table": null, @@ 
-2758,6 +2998,7 @@ "chain": null, "fields": null, "hogql_value": "pointer_target_fixed", + "id": null, "name": "pointer_target_fixed", "schema_valid": true, "table": null, @@ -2767,6 +3008,7 @@ "chain": null, "fields": null, "hogql_value": "current_url", + "id": null, "name": "current_url", "schema_valid": true, "table": null, @@ -2776,6 +3018,7 @@ "chain": null, "fields": null, "hogql_value": "timestamp", + "id": null, "name": "timestamp", "schema_valid": true, "table": null, @@ -2785,6 +3028,7 @@ "chain": null, "fields": null, "hogql_value": "type", + "id": null, "name": "type", "schema_valid": true, "table": null, diff --git a/posthog/hogql/database/test/tables.py b/posthog/hogql/database/test/tables.py index f3328091791b7..39e651ee3f82d 100644 --- a/posthog/hogql/database/test/tables.py +++ b/posthog/hogql/database/test/tables.py @@ -26,6 +26,7 @@ def create_aapl_stock_s3_table(name="aapl_stock") -> S3Table: def create_aapl_stock_table_view() -> SavedQuery: return SavedQuery( + id="aapl_stock_view", name="aapl_stock_view", query="SELECT * FROM aapl_stock", fields={ @@ -39,6 +40,7 @@ def create_aapl_stock_table_view() -> SavedQuery: def create_nested_aapl_stock_view() -> SavedQuery: return SavedQuery( + id="aapl_stock_nested_view", name="aapl_stock_nested_view", query="SELECT * FROM aapl_stock_view", fields={ @@ -52,6 +54,7 @@ def create_nested_aapl_stock_view() -> SavedQuery: def create_aapl_stock_table_self_referencing() -> SavedQuery: return SavedQuery( + id="aapl_stock_self", name="aapl_stock_self", query="SELECT * FROM aapl_stock_self", fields={ diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 232e99e7a064e..586eed0c4a274 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -470,6 +470,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "timeStampSub": HogQLFunctionMeta("timeStampSub", 2, 2), "now": HogQLFunctionMeta("now64", 0, 1, tz_aware=True, case_sensitive=False), "nowInBlock": HogQLFunctionMeta("nowInBlock", 1, 1), + "rowNumberInBlock": HogQLFunctionMeta("rowNumberInBlock", 0, 0), "rowNumberInAllBlocks": HogQLFunctionMeta("rowNumberInAllBlocks", 0, 0), "today": HogQLFunctionMeta("today"), "yesterday": HogQLFunctionMeta("yesterday"), @@ -833,6 +834,14 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "leadInFrame": HogQLFunctionMeta("leadInFrame", 1, 1), # table functions "generateSeries": HogQLFunctionMeta("generate_series", 3, 3), + ## UDFS + "aggregate_funnel": HogQLFunctionMeta("aggregate_funnel", 6, 6, aggregate=False), + "aggregate_funnel_array": HogQLFunctionMeta("aggregate_funnel_array", 6, 6, aggregate=False), + "aggregate_funnel_cohort": HogQLFunctionMeta("aggregate_funnel_cohort", 6, 6, aggregate=False), + "aggregate_funnel_trends": HogQLFunctionMeta("aggregate_funnel_trends", 7, 7, aggregate=False), + "aggregate_funnel_array_trends": HogQLFunctionMeta("aggregate_funnel_array_trends", 7, 7, aggregate=False), + "aggregate_funnel_cohort_trends": HogQLFunctionMeta("aggregate_funnel_cohort_trends", 7, 7, aggregate=False), + "aggregate_funnel_test": HogQLFunctionMeta("aggregate_funnel_test", 6, 6, aggregate=False), } # Permitted HogQL aggregations HOGQL_AGGREGATIONS: dict[str, HogQLFunctionMeta] = { @@ -884,7 +893,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy # "topKWeighted": HogQLFunctionMeta("topKWeighted", 1, 1, aggregate=True), # "topKWeightedIf": 
HogQLFunctionMeta("topKWeightedIf", 2, 2, aggregate=True), "groupArray": HogQLFunctionMeta("groupArray", 1, 1, aggregate=True), - # "groupArrayIf": HogQLFunctionMeta("groupArrayIf", 2, 2, aggregate=True), + "groupArrayIf": HogQLFunctionMeta("groupArrayIf", 2, 2, aggregate=True), # "groupArrayLast": HogQLFunctionMeta("groupArrayLast", 1, 1, aggregate=True), # "groupArrayLastIf": HogQLFunctionMeta("groupArrayLastIf", 2, 2, aggregate=True), "groupUniqArray": HogQLFunctionMeta("groupUniqArray", 1, 1, aggregate=True), diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index bce1453e561f8..65db42700c958 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -550,23 +550,26 @@ def action_to_expr(action: Action) -> ast.Expr: if step.text is not None: value = step.text if step.text_matching == "regex": - match = ast.CompareOperationOp.Regex + exprs.append( + parse_expr( + "arrayExists(x -> x =~ {value}, elements_chain_texts)", + {"value": ast.Constant(value=value)}, + ) + ) elif step.text_matching == "contains": - match = ast.CompareOperationOp.ILike - value = f"%{value}%" + exprs.append( + parse_expr( + "arrayExists(x -> x ilike {value}, elements_chain_texts)", + {"value": ast.Constant(value=f"%{value}%")}, + ) + ) else: - match = ast.CompareOperationOp.Eq - - exprs.append( - parse_expr( - "arrayExists(x -> {match}, elements_chain_texts)", - { - "match": ast.CompareOperation( - op=match, left=ast.Field(chain=["x"]), right=ast.Constant(value=value) - ) - }, + exprs.append( + parse_expr( + "arrayExists(x -> x = {value}, elements_chain_texts)", + {"value": ast.Constant(value=value)}, + ) ) - ) if step.url: if step.url_matching == "exact": expr = parse_expr( diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index 02aa63b9cb856..fb2f2f230973d 100644 --- a/posthog/hogql/test/test_property.py +++ b/posthog/hogql/test/test_property.py @@ -649,6 +649,24 @@ def test_action_to_expr(self): self._parse_expr("event = '$autocapture' and arrayExists(x -> x =~ 'blabla', elements_chain_texts)"), ) + action7 = Action.objects.create( + team=self.team, + steps_json=[{"event": "$autocapture", "text": "blabla", "text_matching": "contains"}], + ) + self.assertEqual( + clear_locations(action_to_expr(action7)), + self._parse_expr("event = '$autocapture' and arrayExists(x -> x ilike '%blabla%', elements_chain_texts)"), + ) + + action8 = Action.objects.create( + team=self.team, + steps_json=[{"event": "$autocapture", "text": "blabla", "text_matching": "exact"}], + ) + self.assertEqual( + clear_locations(action_to_expr(action8)), + self._parse_expr("event = '$autocapture' and arrayExists(x -> x = 'blabla', elements_chain_texts)"), + ) + def test_cohort_filter_static(self): cohort = Cohort.objects.create( team=self.team, diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py index c1ebe56127a0b..034a888158329 100644 --- a/posthog/hogql_queries/events_query_runner.py +++ b/posthog/hogql_queries/events_query_runner.py @@ -1,10 +1,10 @@ -import json from datetime import timedelta from typing import Optional from dateutil.parser import isoparse from django.db.models import Prefetch from django.utils.timezone import now +import orjson from posthog.api.element import ElementSerializer from posthog.api.utils import get_pk_or_uuid @@ -206,7 +206,7 @@ def calculate(self) -> EventsQueryResponse: self.paginator.results[index] = list(result) select = result[star_idx] new_result = 
dict(zip(SELECT_STAR_FROM_EVENTS_FIELDS, select)) - new_result["properties"] = json.loads(new_result["properties"]) + new_result["properties"] = orjson.loads(new_result["properties"]) if new_result["elements_chain"]: new_result["elements"] = ElementSerializer( chain_to_elements(new_result["elements_chain"]), many=True diff --git a/posthog/hogql_queries/experiment_result_query_runner.py b/posthog/hogql_queries/experiment_result_query_runner.py new file mode 100644 index 0000000000000..7dcec0d11a9de --- /dev/null +++ b/posthog/hogql_queries/experiment_result_query_runner.py @@ -0,0 +1,76 @@ +from posthog.hogql import ast +from posthog.hogql_queries.query_runner import QueryRunner, get_query_runner +from posthog.schema import ( + ExperimentResultFunnelQueryResponse, + ExperimentResultQuery, + ExperimentResultTrendQueryResponse, + ExperimentVariantTrendResult, + ExperimentVariantFunnelResult, + TrendsQuery, + FunnelsQuery, +) +from typing import Any, Union + +from posthog.models.filters.mixins.utils import cached_property + + +class ExperimentResultQueryRunner(QueryRunner): + query: ExperimentResultQuery + + @cached_property + def source_runner(self) -> QueryRunner: + return get_query_runner(self.query.source, self.team, self.timings, self.limit_context) + + def calculate(self) -> Union[ExperimentResultTrendQueryResponse, ExperimentResultFunnelQueryResponse]: + source_query = self.query.source + if isinstance(source_query, TrendsQuery): + return self._calculate_trends() + elif isinstance(source_query, FunnelsQuery): + return self._calculate_funnels() + else: + raise ValueError(f"Unsupported query type: {type(source_query)}") + + def _calculate_trends(self) -> ExperimentResultTrendQueryResponse: + trends_response = self.source_runner.calculate() + results = self._process_trends_results(trends_response.results) + return ExperimentResultTrendQueryResponse(insight="TRENDS", results=results) + + def _calculate_funnels(self) -> ExperimentResultFunnelQueryResponse: + funnels_response = self.source_runner.calculate() + results = self._process_funnels_results(funnels_response.results) + return ExperimentResultFunnelQueryResponse(insight="FUNNELS", results=results) + + def _process_trends_results(self, trends_results: list[dict[str, Any]]) -> dict[str, ExperimentVariantTrendResult]: + variants = self.query.variants + processed_results = {variant: ExperimentVariantTrendResult(count=0) for variant in variants} + + for result in trends_results: + variant = result.get("breakdown_value") + if variant in variants: + processed_results[variant].count += result.get("count", 0) + + return processed_results + + def _process_funnels_results( + self, funnels_results: list[list[dict[str, Any]]] + ) -> dict[str, ExperimentVariantFunnelResult]: + variants = self.query.variants + processed_results = { + variant: ExperimentVariantFunnelResult(success_count=0, failure_count=0) for variant in variants + } + + for result in funnels_results: + first_step = result[0] + last_step = result[-1] + variant = first_step.get("breakdown_value") + variant_str = variant[0] if isinstance(variant, list) else str(variant) + if variant_str in variants: + total_count = first_step.get("count", 0) + success_count = last_step.get("count", 0) if len(result) > 1 else 0 + processed_results[variant_str].success_count = success_count + processed_results[variant_str].failure_count = total_count - success_count + + return processed_results + + def to_query(self) -> ast.SelectQuery: + raise ValueError(f"Cannot convert source query of type 
{self.query.source.kind} to query") diff --git a/posthog/hogql_queries/insights/funnels/__init__.py b/posthog/hogql_queries/insights/funnels/__init__.py index 8a20d9784df8b..787cd01ec887d 100644 --- a/posthog/hogql_queries/insights/funnels/__init__.py +++ b/posthog/hogql_queries/insights/funnels/__init__.py @@ -1,5 +1,6 @@ from .base import FunnelBase from .funnel import Funnel +from .funnel_udf import FunnelUDF from .funnel_strict import FunnelStrict from .funnel_unordered import FunnelUnordered from .funnel_time_to_convert import FunnelTimeToConvert diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 477c205dd968c..d5757225246f4 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -33,6 +33,7 @@ FunnelExclusionActionsNode, FunnelTimeToConvertResults, FunnelVizType, + FunnelExclusionEventsNode, ) from posthog.types import EntityNode, ExclusionEntityNode @@ -299,7 +300,9 @@ def _serialize_step( action_id = step.event type = "events" elif isinstance(step, DataWarehouseNode): - raise NotImplementedError("DataWarehouseNode is not supported in funnels") + raise ValidationError( + "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step." + ) else: action = Action.objects.get(pk=step.id) name = action.name @@ -408,6 +411,92 @@ def _get_inner_event_query( return funnel_events_query + # This version of the inner event query modifies how exclusions are returned to + # make them behave more like steps. It returns a boolean "exclusion_{0..n}" for each event + def _get_inner_event_query_for_udf( + self, + entities: list[EntityNode] | None = None, + entity_name="events", + skip_entity_filter=False, + skip_step_filter=False, + ) -> ast.SelectQuery: + query, funnelsFilter, breakdown, breakdownType, breakdownAttributionType = ( + self.context.query, + self.context.funnelsFilter, + self.context.breakdown, + self.context.breakdownType, + self.context.breakdownAttributionType, + ) + entities_to_use = entities or query.series + + extra_fields: list[str] = [] + + for prop in self.context.includeProperties: + extra_fields.append(prop) + + funnel_events_query = FunnelEventQuery( + context=self.context, + extra_fields=[*self._extra_event_fields, *extra_fields], + extra_event_properties=self._extra_event_properties, + ).to_query( + skip_entity_filter=skip_entity_filter, + ) + # funnel_events_query, params = FunnelEventQuery( + # extra_fields=[*self._extra_event_fields, *extra_fields], + # extra_event_properties=self._extra_event_properties, + # ).get_query(entities_to_use, entity_name, skip_entity_filter=skip_entity_filter) + + all_step_cols: list[ast.Expr] = [] + all_exclusions: list[list[FunnelExclusionEventsNode | FunnelExclusionActionsNode]] = [] + for index, entity in enumerate(entities_to_use): + step_cols = self._get_step_col(entity, index, entity_name) + all_step_cols.extend(step_cols) + all_exclusions.append([]) + + for excluded_entity in funnelsFilter.exclusions or []: + for i in range(excluded_entity.funnelFromStep + 1, excluded_entity.funnelToStep + 1): + all_exclusions[i].append(excluded_entity) + + for index, exclusions in enumerate(all_exclusions): + exclusion_col_expr = self._get_exclusions_col(exclusions, index, entity_name) + all_step_cols.append(exclusion_col_expr) + + breakdown_select_prop = self._get_breakdown_select_prop() + + if breakdown_select_prop: + 
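A quick model of the per-variant math in _process_funnels_results above, since it is easy to misread: the entrance count comes from the first step, the success count from the last step, and failures are the difference. This is a self-contained sketch on toy data; plain dicts stand in for ExperimentVariantFunnelResult, and it skips the list-valued breakdown unwrapping the real method performs.

def summarize_variants(funnels_results: list[list[dict]], variants: list[str]) -> dict:
    out = {v: {"success_count": 0, "failure_count": 0} for v in variants}
    for steps in funnels_results:
        variant = str(steps[0].get("breakdown_value"))
        if variant not in out:
            continue
        total = steps[0].get("count", 0)  # everyone who entered the funnel
        success = steps[-1].get("count", 0) if len(steps) > 1 else 0
        out[variant]["success_count"] = success
        out[variant]["failure_count"] = total - success
    return out

toy = [[{"breakdown_value": "control", "count": 100}, {"breakdown_value": "control", "count": 40}]]
assert summarize_variants(toy, ["control"]) == {"control": {"success_count": 40, "failure_count": 60}}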
all_step_cols.extend(breakdown_select_prop) + + funnel_events_query.select = [*funnel_events_query.select, *all_step_cols] + + if breakdown and breakdownType == BreakdownType.COHORT: + assert funnel_events_query.select_from is not None + funnel_events_query.select_from.next_join = self._get_cohort_breakdown_join() + + if not skip_step_filter: + assert isinstance(funnel_events_query.where, ast.Expr) + steps_conditions = self._get_steps_conditions_for_udf(all_exclusions, length=len(entities_to_use)) + funnel_events_query.where = ast.And(exprs=[funnel_events_query.where, steps_conditions]) + + if breakdown and breakdownAttributionType != BreakdownAttributionType.ALL_EVENTS: + # ALL_EVENTS attribution is the old default, which doesn't need the subquery + return self._add_breakdown_attribution_subquery(funnel_events_query) + + return funnel_events_query + + def _get_exclusions_col( + self, + exclusions: list[ExclusionEntityNode], + index: int, + entity_name: str, + ) -> ast.Expr: + if not exclusions: + return parse_expr(f"0 as exclusion_{index}") + + conditions = [self._build_step_query(exclusion, index, entity_name, "") for exclusion in exclusions] + return parse_expr( + f"if({{condition}}, 1, 0) as exclusion_{index}", placeholders={"condition": ast.Or(exprs=conditions)} + ) + def _get_cohort_breakdown_join(self) -> ast.JoinExpr: breakdown = self.context.breakdown @@ -545,12 +634,23 @@ def _get_steps_conditions(self, length: int) -> ast.Expr: return ast.Or(exprs=step_conditions) + def _get_steps_conditions_for_udf(self, exclusions, length: int) -> ast.Expr: + step_conditions: list[ast.Expr] = [] + + for index in range(length): + step_conditions.append(parse_expr(f"step_{index} = 1")) + if exclusions[index]: + step_conditions.append(parse_expr(f"exclusion_{index} = 1")) + + return ast.Or(exprs=step_conditions) + def _get_step_col( self, entity: EntityNode | ExclusionEntityNode, index: int, entity_name: str, step_prefix: str = "", + for_udf: bool = False, ) -> list[ast.Expr]: # step prefix is used to distinguish actual steps, and exclusion steps # without the prefix, we get the same parameter binding for both, which borks things up @@ -559,9 +659,10 @@ def _get_step_col( step_cols.append( parse_expr(f"if({{condition}}, 1, 0) as {step_prefix}step_{index}", placeholders={"condition": condition}) ) - step_cols.append( - parse_expr(f"if({step_prefix}step_{index} = 1, timestamp, null) as {step_prefix}latest_{index}") - ) + if not for_udf: + step_cols.append( + parse_expr(f"if({step_prefix}step_{index} = 1, timestamp, null) as {step_prefix}latest_{index}") + ) for field in self.extra_event_fields_and_properties: step_cols.append( @@ -584,7 +685,9 @@ def _build_step_query( action = Action.objects.get(pk=int(entity.id), team=self.context.team) event_expr = action_to_expr(action) elif isinstance(entity, DataWarehouseNode): - raise NotImplementedError("DataWarehouseNode is not supported in funnels") + raise ValidationError( + "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step." 
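For the exclusion handling introduced above: as the comment on _get_inner_event_query_for_udf says, exclusions become boolean exclusion_{i} columns, and the range loop attaches an exclusion to every step index strictly after funnelFromStep, up to and including funnelToStep. A small standalone sketch of that bucketing, where Exclusion is a stand-in for FunnelExclusionEventsNode / FunnelExclusionActionsNode:

from dataclasses import dataclass

@dataclass
class Exclusion:
    funnelFromStep: int
    funnelToStep: int

def bucket_exclusions(exclusions: list[Exclusion], n_steps: int) -> list[list[Exclusion]]:
    buckets: list[list[Exclusion]] = [[] for _ in range(n_steps)]
    for excl in exclusions:
        # Same bounds as the loop in _get_inner_event_query_for_udf above.
        for i in range(excl.funnelFromStep + 1, excl.funnelToStep + 1):
            buckets[i].append(excl)
    return buckets

# An exclusion between steps 0 and 2 applies to step indexes 1 and 2, never 0.
assert [len(b) for b in bucket_exclusions([Exclusion(0, 2)], n_steps=3)] == [0, 1, 1]

Step 0 can never carry an exclusion, so its bucket is always empty and _get_exclusions_col emits a constant 0 for it.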
+ ) elif entity.event is None: # all events event_expr = ast.Constant(value=1) diff --git a/posthog/hogql_queries/insights/funnels/funnel_event_query.py b/posthog/hogql_queries/insights/funnels/funnel_event_query.py index 8acb0f7dea87b..c4cb9507534ef 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_event_query.py +++ b/posthog/hogql_queries/insights/funnels/funnel_event_query.py @@ -7,7 +7,13 @@ from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.action.action import Action from posthog.models.property.property import PropertyName -from posthog.schema import ActionsNode, EventsNode, FunnelExclusionActionsNode, FunnelExclusionEventsNode +from posthog.schema import ( + ActionsNode, + DataWarehouseNode, + EventsNode, + FunnelExclusionActionsNode, + FunnelExclusionEventsNode, +) from rest_framework.exceptions import ValidationError @@ -143,6 +149,8 @@ def _entity_expr(self, skip_entity_filter: bool) -> ast.Expr | None: events.update(action.get_step_events()) except Action.DoesNotExist: raise ValidationError(f"Action ID {node.id} does not exist!") + elif isinstance(node, DataWarehouseNode): + continue # Data warehouse nodes aren't based on events else: raise ValidationError("Series and exclusions must be composed of action and event nodes") diff --git a/posthog/hogql_queries/insights/funnels/funnel_query_context.py b/posthog/hogql_queries/insights/funnels/funnel_query_context.py index 8c280e272dbe3..ef6cf57af9563 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_query_context.py +++ b/posthog/hogql_queries/insights/funnels/funnel_query_context.py @@ -2,7 +2,6 @@ from posthog.hogql.constants import LimitContext from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.query_context import QueryContext -from posthog.models.filters.mixins.utils import cached_property from posthog.models.property.util import box_value from posthog.models.team.team import Team from posthog.schema import ( @@ -39,6 +38,8 @@ class FunnelQueryContext(QueryContext): includeProperties: list[str] includeFinalMatchingEvents: Optional[bool] + max_steps_override: int | None = None + def __init__( self, query: FunnelsQuery, @@ -105,6 +106,8 @@ def __init__( self.actorsQuery = None - @cached_property + @property def max_steps(self) -> int: + if self.max_steps_override is not None: + return self.max_steps_override return len(self.query.series) diff --git a/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py b/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py index 38600dcc1fec6..5c28697b6edab 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py +++ b/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py @@ -16,7 +16,8 @@ def __init__( ): super().__init__(context) - self.funnel_order = get_funnel_order_class(self.context.funnelsFilter)(context=self.context) + # Haven't implemented calls for time_to_convert in UDF yet + self.funnel_order = get_funnel_order_class(self.context.funnelsFilter, use_udf=False)(context=self.context) def _format_results(self, results: list) -> FunnelTimeToConvertResults: return FunnelTimeToConvertResults( diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends.py b/posthog/hogql_queries/insights/funnels/funnel_trends.py index ad7a67d1207d6..8bdab281147a9 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends.py @@ -120,16 +120,15 @@ def _format_single_summary(self, summary):
labels.append(timestamp.strftime(HUMAN_READABLE_TIMESTAMP_FORMAT)) return {"count": count, "data": data, "days": days, "labels": labels} - def get_query(self) -> ast.SelectQuery: - team, interval, query, now = self.context.team, self.context.interval, self.context.query, self.context.now - - date_range = QueryDateRange( - date_range=query.dateRange, - team=team, - interval=query.interval, - now=now, + def _date_range(self): + return QueryDateRange( + date_range=self.context.query.dateRange, + team=self.context.team, + interval=self.context.query.interval, + now=self.context.now, ) + def get_query(self) -> ast.SelectQuery: step_counts = self.get_step_counts_without_aggregation_query() # Expects multiple rows for same person, first event time, steps taken. @@ -138,12 +137,6 @@ def get_query(self) -> ast.SelectQuery: reached_to_step_count_condition, _, ) = self.get_steps_reached_conditions() - interval_func = get_interval_func_ch(interval.value) - - if date_range.date_from() is None: - _date_from = get_earliest_timestamp(team.pk) - else: - _date_from = date_range.date_from() breakdown_clause = self._get_breakdown_prop_expr() @@ -154,52 +147,12 @@ def get_query(self) -> ast.SelectQuery: *breakdown_clause, ] - formatted_date_from = (_date_from.strftime("%Y-%m-%d %H:%M:%S"),) - formatted_date_to = (date_range.date_to().strftime("%Y-%m-%d %H:%M:%S"),) - date_from_as_hogql = ast.Call( - name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_from))])], - ) - date_to_as_hogql = ast.Call( - name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], - ) data_select_from = ast.JoinExpr(table=step_counts) data_group_by: list[ast.Expr] = [ast.Field(chain=["entrance_period_start"]), *breakdown_clause] data_query = ast.SelectQuery(select=data_select, select_from=data_select_from, group_by=data_group_by) - fill_select: list[ast.Expr] = [ - ast.Alias( - alias="entrance_period_start", - expr=ast.ArithmeticOperation( - left=get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), - right=ast.Call(name=interval_func, args=[ast.Field(chain=["number"])]), - op=ast.ArithmeticOperationOp.Add, - ), - ), - ] - fill_select_from = ast.JoinExpr( - table=ast.Field(chain=["numbers"]), - table_args=[ - ast.ArithmeticOperation( - left=ast.Call( - name="dateDiff", - args=[ - ast.Constant(value=interval.value), - get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), - get_start_of_interval_hogql(interval.value, team=team, source=date_to_as_hogql), - ], - ), - right=ast.Constant(value=1), - op=ast.ArithmeticOperationOp.Add, - ) - ], - alias="period_offsets", - ) - fill_query = ast.SelectQuery( - select=fill_select, - select_from=fill_select_from, - ) + fill_query = self._get_fill_query() + fill_join = ast.JoinExpr( table=data_query, alias="data", @@ -254,7 +207,7 @@ def get_query(self) -> ast.SelectQuery: ) breakdown_limit = self.get_breakdown_limit() if breakdown_limit: - limit = min(breakdown_limit * len(date_range.all_values()), limit) + limit = min(breakdown_limit * len(self._date_range().all_values()), limit) else: select = [ ast.Field(chain=["fill", "entrance_period_start"]), @@ -276,6 +229,63 @@ def get_query(self) -> ast.SelectQuery: limit=ast.Constant(value=limit), # increased limit (default 100) for hourly breakdown ) + # The fill query returns all the start_interval dates in the response + def _get_fill_query(self) -> ast.SelectQuery: + team, interval = 
self.context.team, self.context.interval + + date_range = self._date_range() + + if date_range.date_from() is None: + _date_from = get_earliest_timestamp(team.pk) + else: + _date_from = date_range.date_from() + + formatted_date_from = (_date_from.strftime("%Y-%m-%d %H:%M:%S"),) + formatted_date_to = (date_range.date_to().strftime("%Y-%m-%d %H:%M:%S"),) + date_from_as_hogql = ast.Call( + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_from))])], + ) + date_to_as_hogql = ast.Call( + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], + ) + interval_func = get_interval_func_ch(interval.value) + + fill_select: list[ast.Expr] = [ + ast.Alias( + alias="entrance_period_start", + expr=ast.ArithmeticOperation( + left=get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), + right=ast.Call(name=interval_func, args=[ast.Field(chain=["number"])]), + op=ast.ArithmeticOperationOp.Add, + ), + ), + ] + fill_select_from = ast.JoinExpr( + table=ast.Field(chain=["numbers"]), + table_args=[ + ast.ArithmeticOperation( + left=ast.Call( + name="dateDiff", + args=[ + ast.Constant(value=interval.value), + get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), + get_start_of_interval_hogql(interval.value, team=team, source=date_to_as_hogql), + ], + ), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Add, + ) + ], + alias="period_offsets", + ) + fill_query = ast.SelectQuery( + select=fill_select, + select_from=fill_select_from, + ) + return fill_query + def get_step_counts_without_aggregation_query( self, *, specific_entrance_period_start: Optional[datetime] = None ) -> ast.SelectQuery: diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py new file mode 100644 index 0000000000000..2f928e014daf4 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py @@ -0,0 +1,163 @@ +from typing import cast + +from posthog.hogql import ast +from posthog.hogql.constants import HogQLQuerySettings +from posthog.hogql.parser import parse_select +from posthog.hogql_queries.insights.funnels import FunnelTrends +from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str +from posthog.schema import BreakdownType, BreakdownAttributionType +from posthog.utils import DATERANGE_MAP + +TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" +HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" + + +class FunnelTrendsUDF(FunnelTrends): + def get_step_counts_query(self): + max_steps = self.context.max_steps + return self._get_step_counts_query( + outer_select=[ + *self._get_matching_event_arrays(max_steps), + ], + inner_select=[ + *self._get_matching_events(max_steps), + ], + ) + + def conversion_window_limit(self) -> int: + return int( + self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() + ) + + def get_query(self) -> ast.SelectQuery: + # If they're asking for a "to_step" just truncate the funnel + funnelsFilter = self.context.funnelsFilter + max_steps = self.context.max_steps if funnelsFilter.funnelToStep is None else funnelsFilter.funnelToStep + 1 + self.context.max_steps_override = max_steps + + if self.context.funnelsFilter.funnelOrderType == "strict": + inner_event_query = self._get_inner_event_query_for_udf( + entity_name="events", skip_step_filter=True, skip_entity_filter=True + ) + else: + 
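The _get_fill_query extraction above keeps the zero-filling behavior of funnel trends in one place so the UDF path below can reuse it: it produces one entrance_period_start row per interval bucket between date_from and date_to, whether or not anyone entered the funnel in that bucket. A rough pure-Python model for a 'day' interval (the real query generates the rows in ClickHouse from numbers() plus dateDiff, truncating via get_start_of_interval_hogql; this helper is only a stand-in):

from datetime import datetime, timedelta

def fill_periods(date_from: datetime, date_to: datetime) -> list[datetime]:
    # Truncate both ends to the start of their day, like the interval truncation in the query.
    start = date_from.replace(hour=0, minute=0, second=0, microsecond=0)
    end = date_to.replace(hour=0, minute=0, second=0, microsecond=0)
    n_periods = (end - start).days + 1  # dateDiff('day', from, to) + 1
    return [start + timedelta(days=n) for n in range(n_periods)]

assert len(fill_periods(datetime(2024, 1, 1, 10), datetime(2024, 1, 3, 9))) == 3  # Jan 1, 2, 3

The bucket count also drives the breakdown row cap seen in both trends implementations: with a breakdown limit of 25 over a 30-bucket range and the UDF cap of 1,000, the limit becomes min(25 * 30, 1000) = 750.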
inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") + + default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" + + # stores the steps as an array of integers from 1 to max_steps + # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] + + # Each event is going to be a set of steps or it's going to be a set of exclusions. It can't be both. + steps = ",".join([f"{i + 1} * step_{i}" for i in range(self.context.max_steps)]) + + # this will error if they put in a bad exclusion + exclusions = "" + if getattr(self.context.funnelsFilter, "exclusions", None): + exclusions = "".join([f",-{i + 1} * exclusion_{i}" for i in range(1, self.context.max_steps)]) + + # Todo: Make this work for breakdowns + if self.context.breakdownType == BreakdownType.COHORT: + fn = "aggregate_funnel_cohort_trends" + breakdown_prop = ", prop" + elif self._query_has_array_breakdown(): + fn = "aggregate_funnel_array_trends" + breakdown_prop = "" + else: + fn = "aggregate_funnel_trends" + breakdown_prop = "" + + prop_selector = "prop" if self.context.breakdown else default_breakdown_selector + prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + + breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" + + from_step = funnelsFilter.funnelFromStep or 0 + + inner_select = cast( + ast.SelectQuery, + parse_select( + f""" + SELECT + arrayJoin({fn}( + {from_step}, + {max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')}, {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + toTimeZone(af_tuple.1, '{self.context.team.timezone}') as entrance_period_start, + af_tuple.2 as success_bool, + af_tuple.3 as breakdown + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + """, + {"inner_event_query": inner_event_query}, + ), + ) + # This is necessary so clickhouse doesn't truncate timezone information when passing datetimes to and from python + inner_select.settings = HogQLQuerySettings(date_time_output_format="iso", date_time_input_format="best_effort") + + conversion_rate_expr = ( + "if(reached_from_step_count > 0, round(reached_to_step_count / reached_from_step_count * 100, 2), 0)" + ) + + fill_query = self._get_fill_query() + + limit = 1_000 + if self.context.breakdown: + breakdown_limit = self.get_breakdown_limit() + if breakdown_limit: + limit = min(breakdown_limit * len(self._date_range().all_values()), limit) + + s = parse_select( + f""" + SELECT + fill.entrance_period_start AS entrance_period_start, + sumIf(data.reached_from_step_count, ifNull(equals(fill.entrance_period_start, data.entrance_period_start), isNull(fill.entrance_period_start) and isNull(data.entrance_period_start))) AS reached_from_step_count, + sumIf(data.reached_to_step_count, ifNull(equals(fill.entrance_period_start, data.entrance_period_start), isNull(fill.entrance_period_start) and isNull(data.entrance_period_start))) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 
2), 0) AS conversion_rate, + data.prop AS prop + FROM + ({{fill_query}}) as fill + CROSS JOIN (SELECT + entrance_period_start as entrance_period_start, + countIf(success_bool != 0) as reached_from_step_count, + countIf(success_bool = 1) as reached_to_step_count, + breakdown as prop + FROM + ({{inner_select}}) + GROUP BY entrance_period_start, breakdown) as data + GROUP BY + fill.entrance_period_start, + data.prop + ORDER BY + sum(reached_from_step_count) OVER (PARTITION BY data.prop) DESC, + data.prop DESC, + fill.entrance_period_start ASC + LIMIT {limit} + """, + {"fill_query": fill_query, "inner_select": inner_select}, + ) + else: + s = parse_select( + f""" + SELECT + fill.entrance_period_start as entrance_period_start, + countIf(success_bool != 0) as reached_from_step_count, + countIf(success_bool = 1) as reached_to_step_count, + {conversion_rate_expr} as conversion_rate, + breakdown as prop + FROM + ({{inner_select}}) as data + RIGHT OUTER JOIN + ({{fill_query}}) as fill + ON data.entrance_period_start = fill.entrance_period_start + GROUP BY entrance_period_start, data.breakdown + ORDER BY entrance_period_start + LIMIT {limit} + """, + {"fill_query": fill_query, "inner_select": inner_select}, + ) + return cast(ast.SelectQuery, s) diff --git a/posthog/hogql_queries/insights/funnels/funnel_udf.py b/posthog/hogql_queries/insights/funnels/funnel_udf.py new file mode 100644 index 0000000000000..7ec91374dcdee --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/funnel_udf.py @@ -0,0 +1,184 @@ +from typing import cast + +from posthog.hogql import ast +from posthog.hogql.parser import parse_select +from posthog.hogql_queries.insights.funnels.base import FunnelBase +from posthog.schema import BreakdownType, BreakdownAttributionType +from posthog.utils import DATERANGE_MAP + +TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" +HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" + + +class FunnelUDF(FunnelBase): + def get_step_counts_query(self): + max_steps = self.context.max_steps + return self._get_step_counts_query( + outer_select=[ + *self._get_matching_event_arrays(max_steps), + ], + inner_select=[ + *self._get_matching_events(max_steps), + ], + ) + + def conversion_window_limit(self) -> int: + return int( + self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() + ) + + def get_query(self) -> ast.SelectQuery: + if self.context.funnelsFilter.funnelOrderType == "strict": + inner_event_query = self._get_inner_event_query_for_udf( + entity_name="events", skip_step_filter=True, skip_entity_filter=True + ) + else: + inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") + + default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" + + # stores the steps as an array of integers from 1 to max_steps + # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] + + # Each event is going to be a set of steps or it's going to be a set of exclusions. It can't be both. 
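Those two comments are the heart of the UDF input format, so a concrete illustration may help. Per event, each step_i flag (0 or 1) is scaled to (i + 1) * step_i and zeros are filtered away, so an event matching steps 0, 1 and 4 of a five-step funnel is handed to the UDF as [1, 2, 5]; exclusions use the same trick with negative markers, -(i + 1) * exclusion_i. A tiny hypothetical helper mirroring the {steps} / arrayFilter fragment built just below:

def encode_steps(step_flags: list[int]) -> list[int]:
    signals = [(i + 1) * flag for i, flag in enumerate(step_flags)]  # e.g. [1, 2, 0, 0, 5]
    return [s for s in signals if s != 0]  # arrayFilter(x -> x != 0, ...)

assert encode_steps([1, 1, 0, 0, 1]) == [1, 2, 5]

The conversion window passed alongside is simply the configured interval in seconds: conversion_window_limit() turns, say, a 14-day window into 14 * 86400 = 1209600.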
+ steps = ",".join([f"{i + 1} * step_{i}" for i in range(self.context.max_steps)]) + + # this will error if they put in a bad exclusion + exclusions = "" + if getattr(self.context.funnelsFilter, "exclusions", None): + exclusions = "".join([f",-{i + 1} * exclusion_{i}" for i in range(1, self.context.max_steps)]) + + if self.context.breakdownType == BreakdownType.COHORT: + fn = "aggregate_funnel_cohort" + breakdown_prop = ", prop" + elif self._query_has_array_breakdown(): + fn = "aggregate_funnel_array" + breakdown_prop = "" + else: + fn = "aggregate_funnel" + breakdown_prop = "" + + prop_selector = "prop" if self.context.breakdown else default_breakdown_selector + prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + + breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" + + # test + ''' + inner_select = parse_select( + f""" + SELECT + arrayJoin({fn}( + {self.context.max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + af_tuple.1 as af, + af_tuple.2 as breakdown, + af_tuple.3 as timings + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + HAVING af >= 0 + """, + {"inner_event_query": inner_event_query}, + ) + return inner_select + ''' + + inner_select = parse_select( + f""" + SELECT + arrayJoin({fn}( + {self.context.max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + af_tuple.1 as af, + af_tuple.2 as breakdown, + af_tuple.3 as timings + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + HAVING af >= 0 + """, + {"inner_event_query": inner_event_query}, + ) + + step_results = ",".join( + [f"countIf(ifNull(equals(af, {i}), 0)) AS step_{i+1}" for i in range(self.context.max_steps)] + ) + step_results2 = ",".join([f"sum(step_{i+1}) AS step_{i+1}" for i in range(self.context.max_steps)]) + + conversion_time_arrays = ",".join( + [ + f"groupArrayIf(timings[{i}], timings[{i}] > 0) AS step_{i}_conversion_times" + for i in range(1, self.context.max_steps) + ] + ) + + order_by = ",".join([f"step_{i+1} DESC" for i in reversed(range(self.context.max_steps))]) + + other_aggregation = "['Other']" if self._query_has_array_breakdown() else "'Other'" + + use_breakdown_limit = self.context.breakdown and self.context.breakdownType in [ + BreakdownType.PERSON, + BreakdownType.EVENT, + BreakdownType.GROUP, + ] + + final_prop = ( + f"if(row_number < {self.get_breakdown_limit()}, breakdown, {other_aggregation})" + if use_breakdown_limit + else "breakdown" + ) + + s = parse_select( + f""" + SELECT + {step_results}, + {conversion_time_arrays}, + rowNumberInBlock() as row_number, + {final_prop} as final_prop + FROM + {{inner_select}} + GROUP BY breakdown + ORDER BY {order_by} + """, + {"inner_select": inner_select}, + ) + + mean_conversion_times = ",".join( + [ + f"arrayMap(x -> if(isNaN(x), NULL, x), [avgArray(step_{i}_conversion_times)])[1] AS step_{i}_average_conversion_time" + 
for i in range(1, self.context.max_steps) + ] + ) + median_conversion_times = ",".join( + [ + f"arrayMap(x -> if(isNaN(x), NULL, x), [medianArray(step_{i}_conversion_times)])[1] AS step_{i}_median_conversion_time" + for i in range(1, self.context.max_steps) + ] + ) + + # Weird: unless you reference row_number in this outer block, it doesn't work correctly + s = parse_select( + f""" + SELECT + {step_results2}, + {mean_conversion_times}, + {median_conversion_times}, + groupArray(row_number) as row_number, + final_prop + FROM + {{s}} + GROUP BY final_prop + """, + {"s": s}, + ) + + return cast(ast.SelectQuery, s) diff --git a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py index 08c64720a4f9d..ca10680f9f6a1 100644 --- a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py @@ -17,7 +17,9 @@ from posthog.hogql_queries.insights.funnels.funnel_query_context import FunnelQueryContext from posthog.hogql_queries.insights.funnels.funnel_time_to_convert import FunnelTimeToConvert from posthog.hogql_queries.insights.funnels.funnel_trends import FunnelTrends +from posthog.hogql_queries.insights.funnels.funnel_trends_udf import FunnelTrendsUDF from posthog.hogql_queries.insights.funnels.utils import get_funnel_actor_class, get_funnel_order_class +from posthog.hogql_queries.legacy_compatibility.feature_flag import insight_funnels_use_udf from posthog.hogql_queries.query_runner import QueryRunner from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models import Team @@ -28,6 +30,7 @@ FunnelsQuery, FunnelsQueryResponse, HogQLQueryModifiers, + StepOrderValue, ) @@ -102,18 +105,28 @@ def calculate(self): if response.timings is not None: timings.extend(response.timings) - return FunnelsQueryResponse(results=results, timings=timings, hogql=hogql, modifiers=self.modifiers) + return FunnelsQueryResponse( + isUdf=self._use_udf, results=results, timings=timings, hogql=hogql, modifiers=self.modifiers + ) + + @cached_property + def _use_udf(self): + return self.context.funnelsFilter.useUdf or insight_funnels_use_udf(self.team) @cached_property def funnel_order_class(self): - return get_funnel_order_class(self.context.funnelsFilter)(context=self.context) + return get_funnel_order_class(self.context.funnelsFilter, use_udf=self._use_udf)(context=self.context) @cached_property def funnel_class(self): funnelVizType = self.context.funnelsFilter.funnelVizType if funnelVizType == FunnelVizType.TRENDS: - return FunnelTrends(context=self.context, **self.kwargs) + return ( + FunnelTrendsUDF(context=self.context, **self.kwargs) + if self._use_udf and self.context.funnelsFilter.funnelOrderType != StepOrderValue.UNORDERED + else FunnelTrends(context=self.context, **self.kwargs) + ) elif funnelVizType == FunnelVizType.TIME_TO_CONVERT: return FunnelTimeToConvert(context=self.context) else: diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_breakdowns_by_current_url.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_breakdowns_by_current_url.ambr deleted file mode 100644 index 142118fb7056f..0000000000000 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_breakdowns_by_current_url.ambr +++ /dev/null @@ -1,213 +0,0 @@ -# serializer version: 1 -# name: TestBreakdownsByCurrentURL.test_breakdown_by_current_url - ''' - SELECT 
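Stepping back to the funnels_query_runner.py hunk above: the UDF implementations are opt-in, gated on either an explicit useUdf on the funnels filter or the insight_funnels_use_udf flag for the team, and unordered trends always fall back to the classic FunnelTrends. A condensed sketch of that dispatch, with strings standing in for the runner classes:

def pick_trends_impl(use_udf_filter: bool, team_flagged: bool, order_type: str) -> str:
    use_udf = use_udf_filter or team_flagged  # mirrors _use_udf above
    if use_udf and order_type != "unordered":
        return "FunnelTrendsUDF"
    return "FunnelTrends"

assert pick_trends_impl(False, True, "ordered") == "FunnelTrendsUDF"
assert pick_trends_impl(True, False, "unordered") == "FunnelTrends"

The isUdf field added to FunnelsQueryResponse makes this choice visible, so callers can tell which path produced a given result.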
[if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS value, - count(*) AS count - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), in(e.event, tuple('terminate funnel', 'watched movie')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id)) - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# --- -# name: TestBreakdownsByCurrentURL.test_breakdown_by_current_url.1 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - if(has([['https://example.com/home'], ['https://example.com'], ['/']], prop), prop, ['Other']) AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'watched movie'), 1, 0) AS step_0, - 
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'terminate funnel'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('terminate funnel', 'watched movie'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) - GROUP BY prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# --- -# name: TestBreakdownsByCurrentURL.test_breakdown_by_pathname - ''' - SELECT [if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS value, - count(*) AS count - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), in(e.event, tuple('terminate funnel', 'watched movie')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id)) - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# --- -# name: TestBreakdownsByCurrentURL.test_breakdown_by_pathname.1 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - 
steps AS steps, - prop AS prop, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - if(has([['/'], ['/home']], prop), prop, ['Other']) AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'watched movie'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'terminate funnel'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('terminate funnel', 'watched movie'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) - GROUP BY prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# 
--- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index 7e7d0e0586b32..8e77766f6667d 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -1,4 +1,1131 @@ # serializer version: 1 +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + 
INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT 
toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([])))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), 
ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + 
if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + 
FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, 
plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + 
if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), 
'') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', 
toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) 
step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + 
WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= 
toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND 
distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- # name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen ''' SELECT sum(step_1) AS step_1, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr new file mode 100644 index 0000000000000..837c953bc93f5 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -0,0 +1,2066 @@ +# serializer version: 1 +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS 
BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + 
FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([])))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS 
step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE 
ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, 
+ step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + 
(SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 
00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY 
aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= 
toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS 
max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON 
e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, 
timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by 
aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + 
exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + 
prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + 
latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, 
groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS 
step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + 
final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + 
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (1=1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [1, 2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (1=1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (1=1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [1, 2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (1=1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr
index 7267c85d00c1b..cfbc8d8a68362 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr
@@ -1,4 +1,366 @@
 # serializer version: 1
+# name: BaseTestFunnelTrends.test_timezones_trends
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ data.reached_from_step_count AS reached_from_step_count,
+ data.reached_to_step_count AS reached_to_step_count,
+ if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate
+ FROM
+ (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill
+ LEFT OUTER JOIN
+ (SELECT entrance_period_start AS entrance_period_start,
+ countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count,
+ countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ toStartOfDay(timestamp) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))
+ WHERE ifNull(equals(step_0, 1), 0))
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start)
+ ORDER BY fill.entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: BaseTestFunnelTrends.test_timezones_trends.1
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ data.reached_from_step_count AS reached_from_step_count,
+ data.reached_to_step_count AS reached_to_step_count,
+ if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate
+ FROM
+ (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill
+ LEFT OUTER JOIN
+ (SELECT entrance_period_start AS entrance_period_start,
+ countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count,
+ countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ toStartOfDay(timestamp) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))
+ WHERE ifNull(equals(step_0, 1), 0))
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start)
+ ORDER BY fill.entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: BaseTestFunnelTrends.test_week_interval
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ data.reached_from_step_count AS reached_from_step_count,
+ data.reached_to_step_count AS reached_to_step_count,
+ if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate
+ FROM
+ (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill
+ LEFT OUTER JOIN
+ (SELECT entrance_period_start AS entrance_period_start,
+ countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count,
+ countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ toStartOfWeek(timestamp, 0) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))
+ WHERE ifNull(equals(step_0, 1), 0))
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start)
+ ORDER BY fill.entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: BaseTestFunnelTrends.test_week_interval.1
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps ,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ latest_1,
+ step_2,
+ if(latest_2 < latest_1, NULL, latest_2) as latest_2
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'step one', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'step two', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'step three', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['step one', 'step three', 'step two']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ WHERE team_id = 2
+ AND event IN ['step one', 'step three', 'step two']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) ))))
+ WHERE step_0 = 1 )
+ WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00'
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ WHERE steps_completed >= 3
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
 # name: TestFunnelTrends.test_timezones_trends
 '''
 SELECT fill.entrance_period_start AS entrance_period_start,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr
new file mode 100644
index 0000000000000..0c52cf349a36b
--- /dev/null
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr
@@ -0,0 +1,602 @@
+# serializer version: 1
+# name: BaseTestFunnelTrends.test_timezones_trends
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ data.reached_from_step_count AS reached_from_step_count,
+ data.reached_to_step_count AS reached_to_step_count,
+ if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate
+ FROM
+ (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill
+ LEFT OUTER JOIN
+ (SELECT entrance_period_start AS entrance_period_start,
+ countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count,
+ countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ toStartOfDay(timestamp) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)),
dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))
+ WHERE ifNull(equals(step_0, 1), 0))
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start)
+ ORDER BY fill.entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: BaseTestFunnelTrends.test_timezones_trends.1
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ data.reached_from_step_count AS reached_from_step_count,
+ data.reached_to_step_count AS reached_to_step_count,
+ if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate
+ FROM
+ (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill
+ LEFT OUTER JOIN
+ (SELECT entrance_period_start AS entrance_period_start,
+ countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count,
+ countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ toStartOfDay(timestamp) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))
+ WHERE ifNull(equals(step_0, 1), 0))
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start)
+ ORDER BY fill.entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: BaseTestFunnelTrends.test_week_interval
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ data.reached_from_step_count AS reached_from_step_count,
+ data.reached_to_step_count AS reached_to_step_count,
+ if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate
+ FROM
+ (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill
+ LEFT OUTER JOIN
+ (SELECT entrance_period_start AS entrance_period_start,
+ countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count,
+ countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ toStartOfWeek(timestamp, 0) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))
+ WHERE ifNull(equals(step_0, 1), 0))
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start)
+ ORDER BY fill.entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: BaseTestFunnelTrends.test_week_interval.1
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps ,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ latest_1,
+ step_2,
+ if(latest_2 < latest_1, NULL, latest_2) as latest_2
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'step one', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'step two', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'step three', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['step one', 'step three', 'step two']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ WHERE team_id = 2
+ AND event IN ['step one', 'step three', 'step two']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) ))))
+ WHERE step_0 = 1 )
+ WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00'
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ WHERE steps_completed >= 3
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestFunnelTrendsUDF.test_timezones_trends
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count,
+ countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count,
+ if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate,
+ data.breakdown AS prop
+ FROM
+ (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
+ toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start,
+ af_tuple.2 AS success_bool,
+ af_tuple.3 AS breakdown
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ 0 AS exclusion_0,
+ 0 AS exclusion_1,
+ 0 AS exclusion_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))
+ GROUP BY aggregation_target SETTINGS date_time_output_format='iso',
+ date_time_input_format='best_effort') AS data
+ RIGHT OUTER JOIN
+ (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start)
+ GROUP BY entrance_period_start,
+ data.breakdown
+ ORDER BY entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: TestFunnelTrendsUDF.test_timezones_trends.1
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count,
+ countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count,
+ if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate,
+ data.breakdown AS prop
+ FROM
+ (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
+ toTimeZone(af_tuple.1, 'US/Pacific') AS entrance_period_start,
+ af_tuple.2 AS success_bool,
+ af_tuple.3 AS breakdown
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ 0 AS exclusion_0,
+ 0 AS exclusion_1,
+ 0 AS exclusion_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))
+ GROUP BY aggregation_target SETTINGS date_time_output_format='iso',
+ date_time_input_format='best_effort') AS data
+ RIGHT OUTER JOIN
+ (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start)
+ GROUP BY entrance_period_start,
+ data.breakdown
+ ORDER BY entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: TestFunnelTrendsUDF.test_week_interval
+ '''
+ SELECT fill.entrance_period_start AS entrance_period_start,
+ countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count,
+ countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count,
+ if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate,
+ data.breakdown AS prop
+ FROM
+ (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfWeek(timestamp, 0), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
+ toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start,
+ af_tuple.2 AS success_bool,
+ af_tuple.3 AS breakdown
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'step two'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'step three'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ 0 AS exclusion_0,
+ 0 AS exclusion_1,
+ 0 AS exclusion_2
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))
+ GROUP BY aggregation_target SETTINGS date_time_output_format='iso',
+ date_time_input_format='best_effort') AS data
+ RIGHT OUTER JOIN
+ (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start
+ FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start)
+ GROUP BY entrance_period_start,
+ data.breakdown
+ ORDER BY entrance_period_start ASC
+ LIMIT 1000 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1,
+ format_csv_allow_double_quotes=0,
+ max_ast_elements=4000000,
+ max_expanded_ast_elements=4000000,
+ max_bytes_before_external_group_by=23622320128,
+ allow_experimental_analyzer=1
+ '''
+# ---
+# name: TestFunnelTrendsUDF.test_week_interval.1
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start,
+ max(steps) AS steps_completed
+ FROM
+ (SELECT *,
+ if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY
+ AND latest_1 <= latest_2
+ AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps ,
+ if(isNotNull(latest_1)
+ AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time,
+ if(isNotNull(latest_2)
+ AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ latest_1,
+ step_2,
+ if(latest_2 < latest_1, NULL, latest_2) as latest_2
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'step one', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'step two', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'step three', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['step one', 'step three', 'step two']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ WHERE team_id = 2
+ AND event IN ['step one', 'step three', 'step two']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) ))))
+ WHERE step_0 = 1 )
+ WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00'
+ GROUP BY aggregation_target,
+ entrance_period_start)
+ WHERE steps_completed >= 3
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr
new file mode 100644
index 0000000000000..e8487d4a42a9e
--- /dev/null
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr
@@ -0,0 +1,1887 @@
+# serializer version: 1
+# name: TestFOSSFunnelUDF.test_funnel_conversion_window_seconds
+ '''
+ SELECT sum(step_1) AS step_1,
+ sum(step_2) AS step_2,
+ sum(step_3) AS step_3,
+ arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time,
+ arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time,
+ arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time,
+ arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time,
+ groupArray(row_number) AS row_number,
+ final_prop AS final_prop
+ FROM
+ (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1,
+ countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2,
+ countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3,
+ groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times,
+ groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times,
+ rowNumberInBlock() AS row_number,
+ breakdown AS final_prop
+ FROM
+ (SELECT arrayJoin(aggregate_funnel_array(3, 15, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
+ af_tuple.1 AS af,
+ af_tuple.2 AS breakdown,
+ af_tuple.3 AS timings
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'step one'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+
if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_conversion_window_seconds.1 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS 
aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_events_with_person_on_events_v2 + ''' + + SELECT DISTINCT person_id + FROM events + WHERE team_id = 2 + AND 
distinct_id = 'stopped_after_pay' + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_events_with_person_on_events_v2.1 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)button(\\.|$|;|:)'), arrayExists(x -> ifNull(equals(x, 'Pay $10'), 0), e.elements_chain_texts)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)a(\\.|$|;|:)'), equals(e.elements_chain_href, '/movie')), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2011-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2012-01-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$autocapture', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + 
max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = NULL + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter.1 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = 0 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter.2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0)), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'paid'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + 
max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups.1 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 
UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), 
ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [1, 2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups.2 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + 
(SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT 
argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups.3 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) 
OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), 
toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_static_cohort_step_filter + ''' + + SELECT count(DISTINCT person_id) + FROM person_static_cohort + WHERE team_id = 2 + AND cohort_id = 2 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_static_cohort_step_filter.1 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 2)))), 0)), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'paid'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, 
+ allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_timezones + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'paid'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), 
breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + 
rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'step_1', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT 
countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS 
step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + 
max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 
'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING 
ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + 
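-- annotation (reading inferred from the window frames below): each min(latest_N) over (PARTITION BY aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) yields, per row, the earliest step-N timestamp at or after that row for the same person and breakdown value, which the outer if() chain then checks against the 7-day conversion window. + 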
min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: 
TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 
'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, 
null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) 
step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py index 77033100a5009..575f76992ebf3 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py @@ -1,6 +1,7 @@ import uuid from datetime import datetime from typing import cast +from unittest.mock import patch, Mock from django.test import override_settings from freezegun import 
freeze_time @@ -68,6 +69,7 @@ def _create_action(**kwargs): return action +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelBreakdown( ClickhouseTestMixin, funnel_breakdown_test_factory( # type: ignore @@ -81,6 +83,7 @@ class TestFunnelBreakdown( pass +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelGroupBreakdown( ClickhouseTestMixin, funnel_breakdown_group_test_factory( # type: ignore @@ -91,6 +94,7 @@ class TestFunnelGroupBreakdown( pass +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelConversionTime( ClickhouseTestMixin, funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelActors), # type: ignore @@ -2460,10 +2464,14 @@ def test_advanced_funnel_exclusions_between_steps(self): query = cast(FunnelsQuery, filter_to_query(filters)) results = FunnelsQueryRunner(query=query, team=self.team).calculate().results - self.assertEqual(results[0]["name"], "user signed up") - self.assertEqual(results[0]["count"], 0) - - self.assertEqual(results[4]["count"], 0) + # There should be no events. UDF funnels return an empty array and report "no events"; + # old-style funnels return a count of 0 + try: + self.assertEqual([], results) + except AssertionError: + self.assertEqual(results[0]["name"], "user signed up") + self.assertEqual(results[0]["count"], 0) + self.assertEqual(results[4]["count"], 0) self.assertCountEqual(self._get_actor_ids_at_step(filters, 1), []) @@ -3290,7 +3298,7 @@ def test_timezones(self): # event _create_person(distinct_ids=["user_1"], team_id=self.team.pk) - #  this event shouldn't appear as in US/Pacific this would be the previous day + # this event shouldn't appear, as in US/Pacific this would be the previous day _create_event( team=self.team, event="user signed up", @@ -3300,9 +3308,13 @@ def test_timezones(self): query = cast(FunnelsQuery, filter_to_query(filters)) results = FunnelsQueryRunner(query=query, team=self.team).calculate().results - - self.assertEqual(results[0]["name"], "user signed up") - self.assertEqual(results[0]["count"], 0) + # There should be no events. 
UDF funnels return an empty array and report "no events"; + # old-style funnels return a count of 0 + try: + self.assertEqual([], results) + except AssertionError: + self.assertEqual(results[0]["name"], "user signed up") + self.assertEqual(results[0]["count"], 0) def test_funnel_with_sampling(self): action_play_movie = Action.objects.create( @@ -4095,6 +4107,7 @@ def test_first_time_for_user_funnel_multiple_ids(self): return TestGetFunnel +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFOSSFunnel(funnel_test_factory(Funnel, _create_event, _create_person)): # type: ignore maxDiff = None diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py index e146547285fae..7be35d81324d1 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py @@ -1,5 +1,6 @@ from datetime import datetime from typing import cast +from unittest.mock import Mock, patch from posthog.constants import INSIGHT_FUNNELS, FunnelOrderType from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner @@ -38,7 +39,7 @@ def _create_action(**kwargs): return action -class TestFunnelStrictStepsBreakdown( +class BaseTestFunnelStrictStepsBreakdown( ClickhouseTestMixin, funnel_breakdown_test_factory( # type: ignore FunnelOrderType.STRICT, @@ -178,7 +179,7 @@ def test_strict_breakdown_events_with_multiple_properties(self): self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Safari"]), [people["person2"].uuid]) -class TestStrictFunnelGroupBreakdown( +class BaseTestStrictFunnelGroupBreakdown( ClickhouseTestMixin, funnel_breakdown_group_test_factory( # type: ignore FunnelOrderType.STRICT, @@ -188,7 +189,7 @@ class TestStrictFunnelGroupBreakdown( pass -class TestFunnelStrictStepsConversionTime( +class BaseTestFunnelStrictStepsConversionTime( ClickhouseTestMixin, funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelStrictActors), # type: ignore ): @@ -196,7 +197,7 @@ class TestFunnelStrictStepsConversionTime( pass -class TestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): maxDiff = None def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None): @@ -624,3 +625,23 @@ def test_basic_strict_funnel_conversion_times(self): self._get_actor_ids_at_step(filters, 3), [person3_stopped_after_insight_view.uuid], ) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictStepsBreakdown(BaseTestFunnelStrictStepsBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictSteps(BaseTestFunnelStrictSteps): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestStrictFunnelGroupBreakdown(BaseTestStrictFunnelGroupBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictStepsConversionTime(BaseTestFunnelStrictStepsConversionTime): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py new file mode 100644 index 0000000000000..178e329d3748e --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py @@ -0,0 +1,28 @@ +from unittest.mock import Mock, patch + 
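# The classes below rerun the Base* strict-funnel suites with posthoganalytics.feature_enabled +# patched to return True, i.e. with the "insight-funnels-use-udf" flag on, so that +# get_funnel_order_class (see the utils.py hunk in this diff) resolves to FunnelUDF instead of +# Funnel/FunnelStrict. + 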
+from posthog.hogql_queries.insights.funnels.test.test_funnel_strict import ( + BaseTestFunnelStrictStepsBreakdown, + BaseTestFunnelStrictSteps, + BaseTestStrictFunnelGroupBreakdown, + BaseTestFunnelStrictStepsConversionTime, +) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictStepsBreakdown(BaseTestFunnelStrictStepsBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictSteps(BaseTestFunnelStrictSteps): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestStrictFunnelGroupBreakdown(BaseTestStrictFunnelGroupBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictStepsConversionTime(BaseTestFunnelStrictStepsConversionTime): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py index 7b86e305ca89e..55a1f8660d0ca 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py @@ -1,5 +1,6 @@ from datetime import date, datetime, timedelta from typing import cast +from unittest.mock import patch, Mock from zoneinfo import ZoneInfo from freezegun.api import freeze_time @@ -23,7 +24,7 @@ FORMAT_TIME_DAY_END = "%Y-%m-%d 23:59:59" -class TestFunnelTrends(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelTrends(ClickhouseTestMixin, APIBaseTest): maxDiff = None def _get_actors_at_step(self, filter, entrance_period_start, drop_off): @@ -130,43 +131,43 @@ def test_only_one_user_reached_one_step(self): [ { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 1, "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, @@ -1611,3 +1612,8 @@ def test_parses_breakdown_correctly(self): results = FunnelsQueryRunner(query=query, team=self.team).calculate().results self.assertEqual(len(results), 1) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelTrends(BaseTestFunnelTrends): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py new file mode 100644 index 0000000000000..6965222b749f5 --- 
/dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py @@ -0,0 +1,8 @@ +from unittest.mock import patch, Mock + +from posthog.hogql_queries.insights.funnels.test.test_funnel_trends import BaseTestFunnelTrends + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelTrendsUDF(BaseTestFunnelTrends): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_udf.py new file mode 100644 index 0000000000000..2844d4b7792d4 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_udf.py @@ -0,0 +1,66 @@ +from unittest.mock import patch, Mock + +from posthog.constants import FunnelOrderType +from posthog.hogql_queries.insights.funnels import Funnel +from posthog.hogql_queries.insights.funnels.test.breakdown_cases import ( + funnel_breakdown_test_factory, + funnel_breakdown_group_test_factory, +) +from posthog.models import Action +from posthog.queries.funnels import ClickhouseFunnelActors +from posthog.test.base import ( + ClickhouseTestMixin, + _create_event, + _create_person, +) +from test_funnel import funnel_test_factory +from posthog.hogql_queries.insights.funnels.test.conversion_time_cases import ( + funnel_conversion_time_test_factory, +) + + +def _create_action(**kwargs): + team = kwargs.pop("team") + name = kwargs.pop("name") + properties = kwargs.pop("properties", {}) + action = Action.objects.create(team=team, name=name, steps_json=[{"event": name, "properties": properties}]) + return action + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelBreakdownUDF( + ClickhouseTestMixin, + funnel_breakdown_test_factory( # type: ignore + FunnelOrderType.ORDERED, + ClickhouseFunnelActors, + _create_action, + _create_person, + ), +): + maxDiff = None + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelGroupBreakdownUDF( + ClickhouseTestMixin, + funnel_breakdown_group_test_factory( # type: ignore + FunnelOrderType.ORDERED, + ClickhouseFunnelActors, + ), +): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFOSSFunnelUDF(funnel_test_factory(Funnel, _create_event, _create_person)): # type: ignore + maxDiff = None + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelConversionTimeUDF( + ClickhouseTestMixin, + funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelActors), # type: ignore +): + maxDiff = None + pass diff --git a/posthog/hogql_queries/insights/funnels/utils.py b/posthog/hogql_queries/insights/funnels/utils.py index d5c968a913494..31c31fa9b361d 100644 --- a/posthog/hogql_queries/insights/funnels/utils.py +++ b/posthog/hogql_queries/insights/funnels/utils.py @@ -5,15 +5,18 @@ from rest_framework.exceptions import ValidationError -def get_funnel_order_class(funnelsFilter: FunnelsFilter): +def get_funnel_order_class(funnelsFilter: FunnelsFilter, use_udf=False): from posthog.hogql_queries.insights.funnels import ( Funnel, + FunnelUDF, FunnelStrict, FunnelUnordered, ) if funnelsFilter.funnelOrderType == StepOrderValue.UNORDERED: return FunnelUnordered + elif use_udf: + return FunnelUDF elif funnelsFilter.funnelOrderType == StepOrderValue.STRICT: return FunnelStrict return Funnel diff --git a/posthog/hogql_queries/insights/utils/entities.py b/posthog/hogql_queries/insights/utils/entities.py index 794ce6170da11..b14653b338035 100644 
--- a/posthog/hogql_queries/insights/utils/entities.py +++ b/posthog/hogql_queries/insights/utils/entities.py @@ -1,6 +1,7 @@ from posthog.schema import ( ActionsNode, CohortPropertyFilter, + DataWarehouseNode, EmptyPropertyFilter, EventsNode, FunnelExclusionActionsNode, @@ -9,16 +10,16 @@ ) from posthog.types import AnyPropertyFilter, EntityNode, ExclusionEntityNode from collections import Counter -from rest_framework.exceptions import ValidationError def is_equal_type(a: EntityNode, b: EntityNode | ExclusionEntityNode) -> bool: if isinstance(a, EventsNode): return isinstance(b, EventsNode) or isinstance(b, FunnelExclusionEventsNode) - elif isinstance(a, ActionsNode): + if isinstance(a, ActionsNode): return isinstance(b, ActionsNode) or isinstance(b, FunnelExclusionActionsNode) - else: - raise ValidationError(detail=f"Type comparision for {type(a)} and {type(b)} not implemented.") + if isinstance(a, DataWarehouseNode): + return isinstance(b, DataWarehouseNode) + raise ValueError(f"Type comparison for {type(a)} and {type(b)} not implemented.") def is_equal(a: EntityNode, b: EntityNode | ExclusionEntityNode, compare_properties=True) -> bool: @@ -44,6 +45,14 @@ def is_equal(a: EntityNode, b: EntityNode | ExclusionEntityNode, compare_propert ): return False + # different data source + if ( + isinstance(a, DataWarehouseNode) + and isinstance(b, DataWarehouseNode) + and (a.id != b.id or a.id_field != b.id_field) + ): + return False + # different properties if compare_properties and _sorted_property_reprs(a.properties) != _sorted_property_reprs(b.properties): return False diff --git a/posthog/hogql_queries/insights/utils/utils.py b/posthog/hogql_queries/insights/utils/utils.py index 747d7e2b6ca5a..15689aba7927e 100644 --- a/posthog/hogql_queries/insights/utils/utils.py +++ b/posthog/hogql_queries/insights/utils/utils.py @@ -10,3 +10,8 @@ def get_start_of_interval_hogql(interval: str, *, team: Team, source: Optional[a if trunc_func == "toStartOfWeek": trunc_func_args.append(ast.Constant(value=int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode))) return ast.Call(name=trunc_func, args=trunc_func_args) + + +def get_start_of_interval_hogql_str(interval: str, *, team: Team, source: str) -> str: + trunc_func = get_trunc_func_ch(interval) + return f"{trunc_func}({source}{f', {int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode)}' if trunc_func == 'toStartOfWeek' else ''})" diff --git a/posthog/hogql_queries/legacy_compatibility/feature_flag.py b/posthog/hogql_queries/legacy_compatibility/feature_flag.py index 6b4a6e0e22a0b..697e479f6031f 100644 --- a/posthog/hogql_queries/legacy_compatibility/feature_flag.py +++ b/posthog/hogql_queries/legacy_compatibility/feature_flag.py @@ -21,3 +21,24 @@ def hogql_insights_replace_filters(team: Team) -> bool: only_evaluate_locally=True, send_feature_flag_events=False, ) + + +def insight_funnels_use_udf(team: Team) -> bool: + return posthoganalytics.feature_enabled( + "insight-funnels-use-udf", + str(team.uuid), + groups={ + "organization": str(team.organization_id), + "project": str(team.id), + }, + group_properties={ + "organization": { + "id": str(team.organization_id), + }, + "project": { + "id": str(team.id), + }, + }, + only_evaluate_locally=False, + send_feature_flag_events=False, + ) diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 824e2e1e91177..fb1749abde18a 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,6 +1,6 @@ from abc import 
ABC, abstractmethod from datetime import datetime, timedelta, UTC -from enum import IntEnum +from enum import StrEnum from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard import structlog @@ -74,48 +74,48 @@ EXTENDED_CACHE_AGE = timedelta(days=1) -class ExecutionMode(IntEnum): # Keep integer values the same for Celery's sake - CALCULATE_BLOCKING_ALWAYS = 5 +class ExecutionMode(StrEnum): + CALCULATE_BLOCKING_ALWAYS = "force_blocking" """Always recalculate.""" - CALCULATE_ASYNC_ALWAYS = 4 + CALCULATE_ASYNC_ALWAYS = "force_async" """Always kick off async calculation.""" - RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE = 3 + RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE = "blocking" """Use cache, unless the results are missing or stale.""" - RECENT_CACHE_CALCULATE_ASYNC_IF_STALE = 2 + RECENT_CACHE_CALCULATE_ASYNC_IF_STALE = "async" """Use cache, kick off async calculation when results are missing or stale.""" - EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE = 1 + EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE = "lazy_async" """Use cache for longer, kick off async calculation when results are missing or stale.""" - CACHE_ONLY_NEVER_CALCULATE = 0 + CACHE_ONLY_NEVER_CALCULATE = "force_cache" """Do not initiate calculation.""" +_REFRESH_TO_EXECUTION_MODE: dict[str | bool, ExecutionMode] = { + **ExecutionMode._value2member_map_, # type: ignore + True: ExecutionMode.CALCULATE_BLOCKING_ALWAYS, +} + + def execution_mode_from_refresh(refresh_requested: bool | str | None) -> ExecutionMode: - refresh_map = { - "blocking": ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, - "async": ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE, - "lazy_async": ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE, - "force_async": ExecutionMode.CALCULATE_ASYNC_ALWAYS, - "force_blocking": ExecutionMode.CALCULATE_BLOCKING_ALWAYS, - "force_cache": ExecutionMode.CACHE_ONLY_NEVER_CALCULATE, - True: ExecutionMode.CALCULATE_BLOCKING_ALWAYS, - } - if refresh_requested in refresh_map: - return refresh_map[refresh_requested] + if refresh_requested: + if execution_mode := _REFRESH_TO_EXECUTION_MODE.get(refresh_requested): + return execution_mode return ExecutionMode.CACHE_ONLY_NEVER_CALCULATE +_SHARED_MODE_WHITELIST = { + # Cache only is default refresh mode - remap to async so shared insights stay fresh + ExecutionMode.CACHE_ONLY_NEVER_CALCULATE: ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE, + # Legacy refresh=true - but on shared insights, we don't give the ability to refresh at will + # TODO: Adjust once shared insights can poll for async query_status + ExecutionMode.CALCULATE_BLOCKING_ALWAYS: ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, + # Allow regular async + ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE: ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE, + # - All others fall back to extended cache - +} + + def shared_insights_execution_mode(execution_mode: ExecutionMode) -> ExecutionMode: - shared_mode_whitelist = { - # Cache only is default refresh mode - remap to async so shared insights stay fresh - ExecutionMode.CACHE_ONLY_NEVER_CALCULATE: ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE, - # Legacy refresh=true - but on shared insights, we don't give the ability to refresh at will - # TODO: Adjust once shared insights can poll for async query_status - ExecutionMode.CALCULATE_BLOCKING_ALWAYS: ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, - # Allow regular async - ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE: ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE, 
- # - All others fall back to extended cache - - } - return shared_mode_whitelist.get(execution_mode, ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE) + return _SHARED_MODE_WHITELIST.get(execution_mode, ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE) RunnableQueryNode = Union[ @@ -330,6 +330,17 @@ def get_query_runner( limit_context=limit_context, ) + if kind == "WebExternalClicksTableQuery": + from .web_analytics.external_clicks import WebExternalClicksTableQueryRunner + + return WebExternalClicksTableQueryRunner( + query=query, + team=team, + timings=timings, + modifiers=modifiers, + limit_context=limit_context, + ) + if kind == "SessionAttributionExplorerQuery": from .web_analytics.session_attribution_explorer_query_runner import SessionAttributionExplorerQueryRunner @@ -352,6 +363,17 @@ def get_query_runner( limit_context=limit_context, ) + if kind == "ExperimentResultQuery": + from .experiment_result_query_runner import ExperimentResultQueryRunner + + return ExperimentResultQueryRunner( + query=query, + team=team, + timings=timings, + modifiers=modifiers, + limit_context=limit_context, + ) + raise ValueError(f"Can't get a runner for an unknown query kind: {kind}") diff --git a/posthog/hogql_queries/test/test_events_query_runner.py b/posthog/hogql_queries/test/test_events_query_runner.py index f42fe3dc65755..70c447b48a174 100644 --- a/posthog/hogql_queries/test/test_events_query_runner.py +++ b/posthog/hogql_queries/test/test_events_query_runner.py @@ -156,3 +156,31 @@ def test_test_account_filters(self): right_expr = cast(ast.Constant, where_expr.right) self.assertEqual(right_expr.value, "%posthog.com%") self.assertEqual(where_expr.op, CompareOperationOp.NotILike) + + def test_big_int(self): + BIG_INT = 2**159 - 24 + self._create_events( + data=[ + ( + "p_null", + "2020-01-11T12:00:04Z", + {"boolean_field": None, "bigInt": BIG_INT}, + ), + ] + ) + + flush_persons_and_events() + + with freeze_time("2020-01-11T12:01:00"): + query = EventsQuery( + after="-24h", + event="$pageview", + kind="EventsQuery", + orderBy=["timestamp ASC"], + select=["*"], + ) + + runner = EventsQueryRunner(query=query, team=self.team) + response = runner.run() + assert isinstance(response, CachedEventsQueryResponse) + assert response.results[0][0]["properties"]["bigInt"] == float(BIG_INT) diff --git a/posthog/hogql_queries/test/test_experiment_result_query_runner.py b/posthog/hogql_queries/test/test_experiment_result_query_runner.py new file mode 100644 index 0000000000000..f58e90358bd5c --- /dev/null +++ b/posthog/hogql_queries/test/test_experiment_result_query_runner.py @@ -0,0 +1,112 @@ +from posthog.hogql_queries.experiment_result_query_runner import ExperimentResultQueryRunner +from posthog.schema import ( + BreakdownFilter, + EventsNode, + ExperimentResultQuery, + FunnelsQuery, + TrendsQuery, + ExperimentResultFunnelQueryResponse, + ExperimentResultTrendQueryResponse, +) +from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events +from freezegun import freeze_time +from typing import cast + + +class TestExperimentResultQueryRunner(ClickhouseTestMixin, APIBaseTest): + def setUp(self): + super().setUp() + + def test_experiment_result_query_runner_FUNNEL(self): + feature_flag_property = f"$feature/test-experiment" + + with freeze_time("2020-01-10 12:00:00"): + for variant, purchase_count in [("control", 6), ("test", 8)]: + for i in range(10): + _create_person(distinct_ids=[f"user_{variant}_{i}"], team_id=self.team.pk) + _create_event( + 
team=self.team, + event="$pageview", + distinct_id=f"user_{variant}_{i}", + timestamp="2020-01-02T12:00:00Z", + properties={feature_flag_property: variant}, + ) + if i < purchase_count: + _create_event( + team=self.team, + event="purchase", + distinct_id=f"user_{variant}_{i}", + timestamp="2020-01-02T12:01:00Z", + properties={feature_flag_property: variant}, + ) + + flush_persons_and_events() + + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + breakdownFilter=BreakdownFilter(breakdown=feature_flag_property), + ) + experiment_query = ExperimentResultQuery( + kind="ExperimentResultQuery", source=funnels_query, variants=["control", "test"] + ) + + runner = ExperimentResultQueryRunner(query=experiment_query, team=self.team) + result = runner.calculate() + + self.assertEqual(result.insight, "FUNNELS") + self.assertEqual(len(result.results), 2) + + funnel_result = cast(ExperimentResultFunnelQueryResponse, result) + + self.assertIn("control", funnel_result.results) + self.assertIn("test", funnel_result.results) + + control_result = funnel_result.results["control"] + test_result = funnel_result.results["test"] + + self.assertEqual(control_result.success_count, 6) + self.assertEqual(control_result.failure_count, 4) + self.assertEqual(test_result.success_count, 8) + self.assertEqual(test_result.failure_count, 2) + + def test_experiment_result_query_runner_TRENDS(self): + feature_flag_property = f"$feature/test-experiment" + + for variant, count in [("control", 11), ("test", 15)]: + for i in range(count): + with freeze_time("2020-01-10 12:00:00"): + _create_event( + team=self.team, + event="$pageview", + distinct_id=f"user_{variant}_{i}", + properties={feature_flag_property: variant}, + ) + flush_persons_and_events() + + trends_query = TrendsQuery( + kind="TrendsQuery", + series=[EventsNode(event="$pageview")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + breakdownFilter=BreakdownFilter(breakdown=feature_flag_property), + ) + experiment_query = ExperimentResultQuery( + kind="ExperimentResultQuery", source=trends_query, variants=["control", "test"] + ) + + runner = ExperimentResultQueryRunner(query=experiment_query, team=self.team) + result = runner.calculate() + + self.assertEqual(result.insight, "TRENDS") + self.assertEqual(len(result.results), 2) + + trend_result = cast(ExperimentResultTrendQueryResponse, result) + + self.assertIn("control", trend_result.results) + self.assertIn("test", trend_result.results) + + control_result = trend_result.results["control"] + test_result = trend_result.results["test"] + + self.assertEqual(control_result.count, 11) + self.assertEqual(test_result.count, 15) diff --git a/posthog/hogql_queries/web_analytics/external_clicks.py b/posthog/hogql_queries/web_analytics/external_clicks.py new file mode 100644 index 0000000000000..5d603ab16deca --- /dev/null +++ b/posthog/hogql_queries/web_analytics/external_clicks.py @@ -0,0 +1,119 @@ +from posthog.hogql import ast +from posthog.hogql.constants import LimitContext +from posthog.hogql.parser import parse_select +from posthog.hogql.property import ( + property_to_expr, +) +from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator +from posthog.hogql_queries.web_analytics.web_analytics_query_runner import ( + WebAnalyticsQueryRunner, + map_columns, +) +from posthog.schema import ( + CachedWebStatsTableQueryResponse, + WebStatsTableQueryResponse, + 
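+    # Input is the new external-clicks query type; the cached/returned payloads
+    # below still reuse the stats-table response models.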
WebExternalClicksTableQuery,
+    WebExternalClicksTableQueryResponse,
+)
+
+
+class WebExternalClicksTableQueryRunner(WebAnalyticsQueryRunner):
+    query: WebExternalClicksTableQuery
+    response: WebExternalClicksTableQueryResponse
+    cached_response: CachedWebStatsTableQueryResponse
+    paginator: HogQLHasMorePaginator
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.paginator = HogQLHasMorePaginator.from_limit_context(
+            limit_context=LimitContext.QUERY, limit=self.query.limit if self.query.limit else None
+        )
+
+    def to_query(self) -> ast.SelectQuery:
+        if self.query.stripQueryParams:
+            url_expr: ast.Expr = ast.Call(
+                name="cutQueryStringAndFragment",
+                args=[ast.Field(chain=["properties", "$external_click_url"])],
+            )
+        else:
+            url_expr = ast.Field(chain=["properties", "$external_click_url"])
+
+        with self.timings.measure("stats_table_query"):
+            query = parse_select(
+                """
+SELECT
+    url AS "context.columns.url",
+    uniq(filtered_person_id) AS "context.columns.visitors",
+    sum(filtered_click_count) AS "context.columns.clicks"
+FROM (
+    SELECT
+        any(person_id) AS filtered_person_id,
+        count() AS filtered_click_count,
+        {url_expr} AS url
+    FROM events
+    WHERE and(
+        timestamp >= {date_from},
+        timestamp < {date_to},
+        events.event == '$autocapture',
+        events.properties.$event_type == 'click',
+        url IS NOT NULL,
+        url != '',
+        {all_properties}
+    )
+    GROUP BY events.`$session_id`, url
+)
+GROUP BY "context.columns.url"
+ORDER BY "context.columns.visitors" DESC,
+"context.columns.url" ASC
+""",
+                timings=self.timings,
+                placeholders={
+                    "url_expr": url_expr,
+                    "all_properties": self._all_properties(),
+                    "date_from": self._date_from(),
+                    "date_to": self._date_to(),
+                },
+            )
+        assert isinstance(query, ast.SelectQuery)
+        return query
+
+    def _all_properties(self) -> ast.Expr:
+        properties = self.query.properties + self._test_account_filters
+        return property_to_expr(properties, team=self.team)
+
+    def _date_to(self) -> ast.Expr:
+        return self.query_date_range.date_to_as_hogql()
+
+    def _date_from(self) -> ast.Expr:
+        return self.query_date_range.date_from_as_hogql()
+
+    def calculate(self):
+        query = self.to_query()
+        response = self.paginator.execute_hogql_query(
+            query_type="stats_table_query",
+            query=query,
+            team=self.team,
+            timings=self.timings,
+            modifiers=self.modifiers,
+        )
+        results = self.paginator.results
+
+        assert results is not None
+
+        results_mapped = map_columns(
+            results,
+            {
+                1: self._unsample,  # visitors
+                2: self._unsample,  # clicks
+            },
+        )
+
+        return WebStatsTableQueryResponse(
+            columns=response.columns,
+            results=results_mapped,
+            timings=response.timings,
+            types=response.types,
+            hogql=response.hogql,
+            modifiers=self.modifiers,
+            **self.paginator.response_params(),
+        )
diff --git a/posthog/hogql_queries/web_analytics/test/test_external_clicks_table.py b/posthog/hogql_queries/web_analytics/test/test_external_clicks_table.py
new file mode 100644
index 0000000000000..19258264e0aed
--- /dev/null
+++ b/posthog/hogql_queries/web_analytics/test/test_external_clicks_table.py
@@ -0,0 +1,211 @@
+from typing import Optional
+
+from freezegun import freeze_time
+
+from posthog.hogql_queries.web_analytics.external_clicks import WebExternalClicksTableQueryRunner
+from posthog.models.utils import uuid7
+from posthog.schema import (
+    DateRange,
+    SessionTableVersion,
+    HogQLQueryModifiers,
+    WebExternalClicksTableQuery,
+)
+from posthog.test.base import (
+    APIBaseTest,
+    ClickhouseTestMixin,
+    _create_event,
+    _create_person,
+)
+
+
+class 
TestExternalClicksTableQueryRunner(ClickhouseTestMixin, APIBaseTest): + def _create_events(self, data, event="$autocapture"): + person_result = [] + for id, timestamps in data: + with freeze_time(timestamps[0][0]): + person_result.append( + _create_person( + team_id=self.team.pk, + distinct_ids=[id], + properties={ + "name": id, + **({"email": "test@posthog.com"} if id == "test" else {}), + }, + ) + ) + for timestamp, session_id, pathname, click in timestamps: + _create_event( + team=self.team, + event=event, + distinct_id=id, + timestamp=timestamp, + properties={ + "$session_id": session_id, + "$pathname": pathname, + "$event_type": "click", + "$external_click_url": click, + }, + elements_chain=f'a:href="{click}"', + ) + return person_result + + def _run_external_clicks_table_query( + self, + date_from, + date_to, + limit=None, + properties=None, + session_table_version: SessionTableVersion = SessionTableVersion.V2, + filter_test_accounts: Optional[bool] = False, + strip_query_params: Optional[bool] = False, + ): + modifiers = HogQLQueryModifiers(sessionTableVersion=session_table_version) + query = WebExternalClicksTableQuery( + dateRange=DateRange(date_from=date_from, date_to=date_to), + properties=properties or [], + limit=limit, + filterTestAccounts=filter_test_accounts, + stripQueryParams=strip_query_params, + ) + runner = WebExternalClicksTableQueryRunner(team=self.team, query=query, modifiers=modifiers) + return runner.calculate() + + def test_no_crash_when_no_data(self): + results = self._run_external_clicks_table_query("2023-12-08", "2023-12-15").results + self.assertEqual([], results) + + def test_increase_in_users( + self, + ): + s1a = str(uuid7("2023-12-02")) + s1b = str(uuid7("2023-12-13")) + s2 = str(uuid7("2023-12-10")) + self._create_events( + [ + ( + "p1", + [ + ("2023-12-02", s1a, "/", "https://www.example.com/"), + ("2023-12-03", s1a, "/login", "https://www.example.com/login"), + ("2023-12-13", s1b, "/docs", "https://www.example.com/docs"), + ], + ), + ("p2", [("2023-12-10", s2, "/", "https://www.example.com/")]), + ] + ) + + results = self._run_external_clicks_table_query("2023-12-01", "2023-12-11").results + + self.assertEqual( + [ + ["https://www.example.com/", 2, 2], + ["https://www.example.com/login", 1, 1], + ], + results, + ) + + def test_all_time(self): + s1a = str(uuid7("2023-12-02")) + s1b = str(uuid7("2023-12-13")) + s2 = str(uuid7("2023-12-10")) + self._create_events( + [ + ( + "p1", + [ + ("2023-12-02", s1a, "/", "https://www.example.com/"), + ("2023-12-03", s1a, "/login", "https://www.example.com/login"), + ("2023-12-13", s1b, "/docs", "https://www.example.com/docs"), + ], + ), + ("p2", [("2023-12-10", s2, "/", "https://www.example.com/")]), + ] + ) + + results = self._run_external_clicks_table_query("all", "2023-12-15").results + + self.assertEqual( + [ + ["https://www.example.com/", 2, 2], + ["https://www.example.com/docs", 1, 1], + ["https://www.example.com/login", 1, 1], + ], + results, + ) + + def test_filter_test_accounts(self): + s1 = str(uuid7("2023-12-02")) + # Create 1 test account + self._create_events( + [ + ( + "test", + [ + ("2023-12-02", s1, "/", "https://www.example.com/"), + ("2023-12-03", s1, "/login", "https://www.example.com/login"), + ], + ) + ] + ) + + results = self._run_external_clicks_table_query("2023-12-01", "2023-12-03", filter_test_accounts=True).results + + self.assertEqual( + [], + results, + ) + + def test_dont_filter_test_accounts(self): + s1 = str(uuid7("2023-12-02")) + # Create 1 test account + self._create_events( + [ + ( 
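+                    # `_create_events` gives the "test" distinct_id a posthog.com email,
+                    # making it a test account; with filter_test_accounts=False both of
+                    # its clicks are still returned.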
+ "test", + [ + ("2023-12-02", s1, "/", "https://www.example.com/"), + ("2023-12-03", s1, "/login", "https://www.example.com/login"), + ], + ) + ] + ) + + results = self._run_external_clicks_table_query("2023-12-01", "2023-12-03", filter_test_accounts=False).results + + self.assertEqual( + [["https://www.example.com/", 1, 1], ["https://www.example.com/login", 1, 1]], + results, + ) + + def test_strip_query_params(self): + s1 = str(uuid7("2023-12-02")) + # Create 1 test account + self._create_events( + [ + ( + "test", + [ + ("2023-12-02", s1, "/login", "https://www.example.com/login?test=1#foo"), + ("2023-12-03", s1, "/login", "https://www.example.com/login#bar"), + ], + ) + ] + ) + + results_strip = self._run_external_clicks_table_query( + "2023-12-01", "2023-12-03", filter_test_accounts=False, strip_query_params=True + ).results + + self.assertEqual( + [["https://www.example.com/login", 1, 2]], + results_strip, + ) + + results_no_strip = self._run_external_clicks_table_query( + "2023-12-01", "2023-12-03", filter_test_accounts=False, strip_query_params=False + ).results + + self.assertEqual( + [["https://www.example.com/login#bar", 1, 1], ["https://www.example.com/login?test=1#foo", 1, 1]], + results_no_strip, + ) diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index 7f0f2790fd5b2..d3e2271e6a3ed 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -25,10 +25,13 @@ SamplingRate, SessionPropertyFilter, WebGoalsQuery, + WebExternalClicksTableQuery, ) from posthog.utils import generate_cache_key, get_safe_cache -WebQueryNode = Union[WebOverviewQuery, WebTopClicksQuery, WebStatsTableQuery, WebGoalsQuery] +WebQueryNode = Union[ + WebOverviewQuery, WebTopClicksQuery, WebStatsTableQuery, WebGoalsQuery, WebExternalClicksTableQuery +] class WebAnalyticsQueryRunner(QueryRunner, ABC): diff --git a/posthog/migrations/0466_alter_externaldatasource_source_type.py b/posthog/migrations/0466_alter_externaldatasource_source_type.py new file mode 100644 index 0000000000000..4a4b2f522f68b --- /dev/null +++ b/posthog/migrations/0466_alter_externaldatasource_source_type.py @@ -0,0 +1,30 @@ +# Generated by Django 4.2.15 on 2024-09-05 10:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0465_datawarehouse_stripe_account"), + ] + + operations = [ + migrations.AlterField( + model_name="externaldatasource", + name="source_type", + field=models.CharField( + choices=[ + ("Stripe", "Stripe"), + ("Hubspot", "Hubspot"), + ("Postgres", "Postgres"), + ("Zendesk", "Zendesk"), + ("Snowflake", "Snowflake"), + ("Salesforce", "Salesforce"), + ("MySQL", "MySQL"), + ("MSSQL", "MSSQL"), + ("Vitally", "Vitally"), + ], + max_length=128, + ), + ), + ] diff --git a/posthog/migrations/0467_add_web_vitals_allowed_metrics.py b/posthog/migrations/0467_add_web_vitals_allowed_metrics.py new file mode 100644 index 0000000000000..79e806a37ebf7 --- /dev/null +++ b/posthog/migrations/0467_add_web_vitals_allowed_metrics.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-09-08 10:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0466_alter_externaldatasource_source_type"), + ] + + operations = [ + migrations.AddField( + model_name="team", + name="autocapture_web_vitals_allowed_metrics", + 
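+            # Nullable JSONField: existing teams keep NULL, so no backfill is needed.
+            # Hypothetical example value: ["FCP", "LCP", "CLS", "INP"].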
field=models.JSONField(blank=True, null=True), + ), + ] diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index 385ff96f6cec0..35b52b6f527a2 100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -17,6 +17,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -80,6 +81,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -143,6 +145,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -206,6 +209,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -269,6 +273,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/models/person_overrides/sql.py b/posthog/models/person_overrides/sql.py index c518db6de0e11..5c7a82c9469c1 100644 --- a/posthog/models/person_overrides/sql.py +++ b/posthog/models/person_overrides/sql.py @@ -69,7 +69,7 @@ ENGINE = ReplicatedReplacingMergeTree( -- NOTE: for testing we use a uuid to ensure that we don't get conflicts -- when the tests tear down and recreate the table. 
- '/clickhouse/tables/{'{uuid}' if settings.TEST else ''}noshard/{CLICKHOUSE_DATABASE}.person_overrides', + '/clickhouse/tables/{'{uuid}' if settings.TEST or settings.E2E_TESTING else ''}noshard/{CLICKHOUSE_DATABASE}.person_overrides', '{{replica}}-{{shard}}', version ) diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 10e658022f386..3aaedbcd5a6fa 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -212,6 +212,7 @@ class Meta: ingested_event = models.BooleanField(default=False) autocapture_opt_out = models.BooleanField(null=True, blank=True) autocapture_web_vitals_opt_in = models.BooleanField(null=True, blank=True) + autocapture_web_vitals_allowed_metrics = models.JSONField(null=True, blank=True) autocapture_exceptions_opt_in = models.BooleanField(null=True, blank=True) autocapture_exceptions_errors_to_ignore = models.JSONField(null=True, blank=True) session_recording_opt_in = models.BooleanField(default=False) diff --git a/posthog/models/utils.py b/posthog/models/utils.py index e9498ce32990e..3dc7e83940b9a 100644 --- a/posthog/models/utils.py +++ b/posthog/models/utils.py @@ -164,7 +164,7 @@ class Meta: class UUIDModel(models.Model): """Base Django Model with default autoincremented ID field replaced with UUIDT.""" - id = models.UUIDField(primary_key=True, default=UUIDT, editable=False) + id: models.UUIDField = models.UUIDField(primary_key=True, default=UUIDT, editable=False) class Meta: abstract = True diff --git a/posthog/plugins/plugin_server_api.py b/posthog/plugins/plugin_server_api.py index fd18e49d16ed7..ef6b312ba874c 100644 --- a/posthog/plugins/plugin_server_api.py +++ b/posthog/plugins/plugin_server_api.py @@ -4,6 +4,7 @@ import structlog from posthog.redis import get_client from posthog.settings import CDP_FUNCTION_EXECUTOR_API_URL, PLUGINS_RELOAD_PUBSUB_CHANNEL, PLUGINS_RELOAD_REDIS_URL +from posthog.models.utils import UUIDT logger = structlog.get_logger(__name__) @@ -62,7 +63,7 @@ def populate_plugin_capabilities_on_workers(plugin_id: str): def create_hog_invocation_test( team_id: int, - hog_function_id: str, + hog_function_id: UUIDT, globals: dict, configuration: dict, mock_async_functions: bool, @@ -78,13 +79,13 @@ def create_hog_invocation_test( ) -def get_hog_function_status(team_id: int, hog_function_id: str) -> requests.Response: +def get_hog_function_status(team_id: int, hog_function_id: UUIDT) -> requests.Response: return requests.get( CDP_FUNCTION_EXECUTOR_API_URL + f"/api/projects/{team_id}/hog_functions/{hog_function_id}/status" ) -def patch_hog_function_status(team_id: int, hog_function_id: str, state: int) -> requests.Response: +def patch_hog_function_status(team_id: int, hog_function_id: UUIDT, state: int) -> requests.Response: return requests.patch( CDP_FUNCTION_EXECUTOR_API_URL + f"/api/projects/{team_id}/hog_functions/{hog_function_id}/status", json={"state": state}, diff --git a/posthog/schema.py b/posthog/schema.py index 5f42c48868676..fb44402f280dd 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -495,6 +495,21 @@ class EventsQueryPersonColumn(BaseModel): uuid: str +class ExperimentVariantFunnelResult(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + failure_count: float + success_count: float + + +class ExperimentVariantTrendResult(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + count: float + + class FilterLogicalOperator(StrEnum): AND_ = "AND" OR_ = "OR" @@ -802,6 +817,7 @@ class NodeKind(StrEnum): RECORDINGS_QUERY = "RecordingsQuery" 
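+    # NOTE (editorial): posthog/schema.py is regenerated from the frontend query
+    # schema, so the kinds added below land here via codegen rather than by hand.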
SESSION_ATTRIBUTION_EXPLORER_QUERY = "SessionAttributionExplorerQuery" ERROR_TRACKING_QUERY = "ErrorTrackingQuery" + EXPERIMENT_RESULT_QUERY = "ExperimentResultQuery" DATA_TABLE_NODE = "DataTableNode" DATA_VISUALIZATION_NODE = "DataVisualizationNode" SAVED_INSIGHT_NODE = "SavedInsightNode" @@ -818,6 +834,7 @@ class NodeKind(StrEnum): WEB_OVERVIEW_QUERY = "WebOverviewQuery" WEB_TOP_CLICKS_QUERY = "WebTopClicksQuery" WEB_STATS_TABLE_QUERY = "WebStatsTableQuery" + WEB_EXTERNAL_CLICKS_TABLE_QUERY = "WebExternalClicksTableQuery" WEB_GOALS_QUERY = "WebGoalsQuery" DATABASE_SCHEMA_QUERY = "DatabaseSchemaQuery" @@ -963,6 +980,22 @@ class QueryResponseAlternative7(BaseModel): warnings: list[HogQLNotice] +class QueryResponseAlternative26(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + insight: Literal["TRENDS"] = "TRENDS" + results: dict[str, ExperimentVariantTrendResult] + + +class QueryResponseAlternative27(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + insight: Literal["FUNNELS"] = "FUNNELS" + results: dict[str, ExperimentVariantFunnelResult] + + class QueryStatus(BaseModel): model_config = ConfigDict( extra="forbid", @@ -1392,6 +1425,33 @@ class Sampling(BaseModel): forceSamplingRate: Optional[SamplingRate] = None +class WebExternalClicksTableQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + columns: Optional[list] = None + error: Optional[str] = Field( + default=None, + description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + ) + hasMore: Optional[bool] = None + hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + limit: Optional[int] = None + modifiers: Optional[HogQLQueryModifiers] = Field( + default=None, description="Modifiers used when performing the query" + ) + offset: Optional[int] = None + query_status: Optional[QueryStatus] = Field( + default=None, description="Query status indicates whether next to the provided data, a query is still running." + ) + results: list + samplingRate: Optional[SamplingRate] = None + timings: Optional[list[QueryTiming]] = Field( + default=None, description="Measured timings for different parts of the query generation process" + ) + types: Optional[list] = None + + class WebGoalsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -1768,6 +1828,7 @@ class CachedFunnelsQueryResponse(BaseModel): description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + isUdf: Optional[bool] = None is_cached: bool last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( @@ -1971,6 +2032,42 @@ class CachedTrendsQueryResponse(BaseModel): ) +class CachedWebExternalClicksTableQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + cache_key: str + cache_target_age: Optional[AwareDatetime] = None + calculation_trigger: Optional[str] = Field( + default=None, description="What triggered the calculation of the query, leave empty if user/immediate" + ) + columns: Optional[list] = None + error: Optional[str] = Field( + default=None, + description="Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", + ) + hasMore: Optional[bool] = None + hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + is_cached: bool + last_refresh: AwareDatetime + limit: Optional[int] = None + modifiers: Optional[HogQLQueryModifiers] = Field( + default=None, description="Modifiers used when performing the query" + ) + next_allowed_client_refresh: AwareDatetime + offset: Optional[int] = None + query_status: Optional[QueryStatus] = Field( + default=None, description="Query status indicates whether next to the provided data, a query is still running." + ) + results: list + samplingRate: Optional[SamplingRate] = None + timezone: str + timings: Optional[list[QueryTiming]] = Field( + default=None, description="Measured timings for different parts of the query generation process" + ) + types: Optional[list] = None + + class CachedWebGoalsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2244,7 +2341,7 @@ class Response4(BaseModel): types: Optional[list] = None -class Response5(BaseModel): +class Response6(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2268,7 +2365,7 @@ class Response5(BaseModel): types: Optional[list] = None -class Response6(BaseModel): +class Response7(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2295,7 +2392,7 @@ class Response6(BaseModel): types: Optional[list] = None -class Response7(BaseModel): +class Response8(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2321,7 +2418,7 @@ class Response7(BaseModel): types: Optional[list] = None -class Response8(BaseModel): +class Response9(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2346,6 +2443,22 @@ class Response8(BaseModel): ) +class Response10(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + insight: Literal["TRENDS"] = "TRENDS" + results: dict[str, ExperimentVariantTrendResult] + + +class Response11(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + insight: Literal["FUNNELS"] = "FUNNELS" + results: dict[str, ExperimentVariantFunnelResult] + + class DataWarehousePersonPropertyFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2375,6 +2488,7 @@ class DatabaseSchemaField(BaseModel): chain: Optional[list[Union[str, int]]] = None fields: Optional[list[str]] = None hogql_value: str + id: Optional[str] = None name: str schema_valid: bool table: Optional[str] = None @@ -2474,6 +2588,22 @@ class EventsQueryResponse(BaseModel): types: list[str] +class ExperimentResultFunnelQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + insight: Literal["FUNNELS"] = "FUNNELS" + results: dict[str, ExperimentVariantFunnelResult] + + +class ExperimentResultTrendQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + insight: Literal["TRENDS"] = "TRENDS" + results: dict[str, ExperimentVariantTrendResult] + + class BreakdownFilter1(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2550,6 +2680,7 @@ class FunnelsQueryResponse(BaseModel): description="Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + isUdf: Optional[bool] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) @@ -2960,7 +3091,7 @@ class QueryResponseAlternative10(BaseModel): types: Optional[list] = None -class QueryResponseAlternative11(BaseModel): +class QueryResponseAlternative12(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2984,7 +3115,7 @@ class QueryResponseAlternative11(BaseModel): types: Optional[list] = None -class QueryResponseAlternative12(BaseModel): +class QueryResponseAlternative13(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3011,7 +3142,7 @@ class QueryResponseAlternative12(BaseModel): types: Optional[list] = None -class QueryResponseAlternative13(BaseModel): +class QueryResponseAlternative14(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3037,7 +3168,7 @@ class QueryResponseAlternative13(BaseModel): types: Optional[list] = None -class QueryResponseAlternative14(BaseModel): +class QueryResponseAlternative15(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3062,7 +3193,7 @@ class QueryResponseAlternative14(BaseModel): ) -class QueryResponseAlternative15(BaseModel): +class QueryResponseAlternative16(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3088,7 +3219,7 @@ class QueryResponseAlternative15(BaseModel): types: list[str] -class QueryResponseAlternative16(BaseModel): +class QueryResponseAlternative17(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3115,7 +3246,7 @@ class QueryResponseAlternative16(BaseModel): types: list[str] -class QueryResponseAlternative17(BaseModel): +class QueryResponseAlternative18(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3145,7 +3276,7 @@ class QueryResponseAlternative17(BaseModel): types: Optional[list] = Field(default=None, description="Types of returned columns") -class QueryResponseAlternative18(BaseModel): +class QueryResponseAlternative19(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3169,7 +3300,7 @@ class QueryResponseAlternative18(BaseModel): ) -class QueryResponseAlternative19(BaseModel): +class QueryResponseAlternative20(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3196,7 +3327,7 @@ class QueryResponseAlternative19(BaseModel): types: Optional[list] = None -class QueryResponseAlternative20(BaseModel): +class QueryResponseAlternative22(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3220,7 +3351,7 @@ class QueryResponseAlternative20(BaseModel): types: Optional[list] = None -class QueryResponseAlternative21(BaseModel): +class QueryResponseAlternative23(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3247,7 +3378,7 @@ class QueryResponseAlternative21(BaseModel): types: Optional[list] = None -class QueryResponseAlternative22(BaseModel): +class QueryResponseAlternative24(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3273,7 +3404,7 @@ class QueryResponseAlternative22(BaseModel): types: Optional[list] = None -class QueryResponseAlternative23(BaseModel): +class QueryResponseAlternative25(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3298,7 +3429,7 @@ class QueryResponseAlternative23(BaseModel): ) -class QueryResponseAlternative24(BaseModel): +class QueryResponseAlternative28(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3320,7 +3451,7 @@ class QueryResponseAlternative24(BaseModel): ) -class 
QueryResponseAlternative25(BaseModel): +class QueryResponseAlternative29(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3329,6 +3460,7 @@ class QueryResponseAlternative25(BaseModel): description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + isUdf: Optional[bool] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) @@ -3341,7 +3473,7 @@ class QueryResponseAlternative25(BaseModel): ) -class QueryResponseAlternative27(BaseModel): +class QueryResponseAlternative31(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3362,7 +3494,7 @@ class QueryResponseAlternative27(BaseModel): ) -class QueryResponseAlternative30(BaseModel): +class QueryResponseAlternative34(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3561,6 +3693,24 @@ class TableSettings(BaseModel): columns: Optional[list[ChartAxis]] = None +class WebExternalClicksTableQuery(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + dateRange: Optional[DateRange] = None + filterTestAccounts: Optional[bool] = None + kind: Literal["WebExternalClicksTableQuery"] = "WebExternalClicksTableQuery" + limit: Optional[int] = None + modifiers: Optional[HogQLQueryModifiers] = Field( + default=None, description="Modifiers used when performing the query" + ) + properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]] + response: Optional[WebExternalClicksTableQueryResponse] = None + sampling: Optional[Sampling] = None + stripQueryParams: Optional[bool] = None + useSessionsTable: Optional[bool] = None + + class WebGoalsQuery(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4361,7 +4511,7 @@ class PropertyGroupFilterValue(BaseModel): ] -class QueryResponseAlternative26(BaseModel): +class QueryResponseAlternative30(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -4642,6 +4792,7 @@ class FunnelsFilter(BaseModel): funnelWindowIntervalUnit: Optional[FunnelConversionWindowTimeUnit] = FunnelConversionWindowTimeUnit.DAY hiddenLegendBreakdowns: Optional[list[str]] = None layout: Optional[FunnelLayout] = FunnelLayout.VERTICAL + useUdf: Optional[bool] = None class HasPropertiesNode(RootModel[Union[EventsNode, EventsQuery, PersonsNode]]): @@ -5173,7 +5324,7 @@ class LifecycleQuery(BaseModel): ) -class QueryResponseAlternative31(BaseModel): +class QueryResponseAlternative35(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -5202,26 +5353,30 @@ class QueryResponseAlternative( QueryResponseAlternative8, QueryResponseAlternative9, QueryResponseAlternative10, - QueryResponseAlternative11, QueryResponseAlternative12, QueryResponseAlternative13, QueryResponseAlternative14, - Any, QueryResponseAlternative15, + ExperimentResultTrendQueryResponse, + ExperimentResultFunnelQueryResponse, + Any, QueryResponseAlternative16, QueryResponseAlternative17, QueryResponseAlternative18, QueryResponseAlternative19, QueryResponseAlternative20, - QueryResponseAlternative21, QueryResponseAlternative22, QueryResponseAlternative23, QueryResponseAlternative24, QueryResponseAlternative25, QueryResponseAlternative26, QueryResponseAlternative27, + QueryResponseAlternative28, + QueryResponseAlternative29, QueryResponseAlternative30, QueryResponseAlternative31, + QueryResponseAlternative34, + QueryResponseAlternative35, ] ] ): @@ -5237,26 +5392,30 @@ class QueryResponseAlternative( 
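+            # The QueryResponseAlternative* members below are renumbered wholesale
+            # because the generator reindexes the union once the two experiment
+            # response models are inserted into it.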
QueryResponseAlternative8, QueryResponseAlternative9, QueryResponseAlternative10, - QueryResponseAlternative11, QueryResponseAlternative12, QueryResponseAlternative13, QueryResponseAlternative14, - Any, QueryResponseAlternative15, + ExperimentResultTrendQueryResponse, + ExperimentResultFunnelQueryResponse, + Any, QueryResponseAlternative16, QueryResponseAlternative17, QueryResponseAlternative18, QueryResponseAlternative19, QueryResponseAlternative20, - QueryResponseAlternative21, QueryResponseAlternative22, QueryResponseAlternative23, QueryResponseAlternative24, QueryResponseAlternative25, QueryResponseAlternative26, QueryResponseAlternative27, + QueryResponseAlternative28, + QueryResponseAlternative29, QueryResponseAlternative30, QueryResponseAlternative31, + QueryResponseAlternative34, + QueryResponseAlternative35, ] @@ -5275,6 +5434,19 @@ class DatabaseSchemaQueryResponse(BaseModel): ] +class ExperimentResultQuery(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + kind: Literal["ExperimentResultQuery"] = "ExperimentResultQuery" + modifiers: Optional[HogQLQueryModifiers] = Field( + default=None, description="Modifiers used when performing the query" + ) + response: Optional[Union[ExperimentResultTrendQueryResponse, ExperimentResultFunnelQueryResponse]] = None + source: Union[TrendsQuery, FunnelsQuery] + variants: list[str] + + class FunnelPathsFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -5544,10 +5716,12 @@ class DataTableNode(BaseModel): Response2, Response3, Response4, - Response5, Response6, Response7, Response8, + Response9, + Response10, + Response11, ] ] = None showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row") @@ -5584,10 +5758,12 @@ class DataTableNode(BaseModel): HogQLQuery, WebOverviewQuery, WebStatsTableQuery, + WebExternalClicksTableQuery, WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, + ExperimentResultQuery, ] = Field(..., description="Source of the events") @@ -5621,10 +5797,12 @@ class HogQLAutocomplete(BaseModel): HogQLAutocomplete, WebOverviewQuery, WebStatsTableQuery, + WebExternalClicksTableQuery, WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, + ExperimentResultQuery, ] ] = Field(default=None, description="Query in whose context to validate.") startPosition: int = Field(..., description="Start position of the editor word") @@ -5662,10 +5840,12 @@ class HogQLMetadata(BaseModel): HogQLAutocomplete, WebOverviewQuery, WebStatsTableQuery, + WebExternalClicksTableQuery, WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, + ExperimentResultQuery, ] ] = Field( default=None, @@ -5706,10 +5886,12 @@ class QueryRequest(BaseModel): HogQLAutocomplete, WebOverviewQuery, WebStatsTableQuery, + WebExternalClicksTableQuery, WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, + ExperimentResultQuery, DataVisualizationNode, DataTableNode, SavedInsightNode, @@ -5753,10 +5935,12 @@ class QuerySchemaRoot( HogQLAutocomplete, WebOverviewQuery, WebStatsTableQuery, + WebExternalClicksTableQuery, WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, + ExperimentResultQuery, DataVisualizationNode, DataTableNode, SavedInsightNode, @@ -5788,10 +5972,12 @@ class QuerySchemaRoot( HogQLAutocomplete, WebOverviewQuery, WebStatsTableQuery, + WebExternalClicksTableQuery, WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, 
ErrorTrackingQuery, + ExperimentResultQuery, DataVisualizationNode, DataTableNode, SavedInsightNode, diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index 23d8c307578c4..cf557448bbd50 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -17,6 +17,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -80,6 +81,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -143,6 +145,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -206,6 +209,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -269,6 +273,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -364,6 +369,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -581,6 +587,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -845,6 +852,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -908,6 +916,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", 
"posthog_team"."session_recording_opt_in", @@ -971,6 +980,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1034,6 +1044,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1097,6 +1108,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1160,6 +1172,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1223,6 +1236,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1318,6 +1332,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1619,6 +1634,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -1733,6 +1749,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2169,6 +2186,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2264,6 +2282,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2378,6 +2397,7 @@ 
"posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2789,6 +2809,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -2884,6 +2905,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3120,6 +3142,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3372,6 +3395,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -3467,6 +3491,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4293,6 +4318,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4414,6 +4440,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4844,6 +4871,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -4939,6 +4967,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5450,6 +5479,7 @@ "posthog_team"."ingested_event", 
"posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5545,6 +5575,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -5986,6 +6017,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6081,6 +6113,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6590,6 +6623,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -6685,6 +6719,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py index 925dce44493d1..ee9229109b832 100644 --- a/posthog/tasks/email.py +++ b/posthog/tasks/email.py @@ -20,6 +20,7 @@ Team, User, ) +from posthog.models.utils import UUIDT from posthog.user_permissions import UserPermissions logger = structlog.get_logger(__name__) @@ -159,7 +160,7 @@ def send_fatal_plugin_error( def send_batch_export_run_failure( - batch_export_run_id: str, + batch_export_run_id: UUIDT, ) -> None: logger = structlog.get_logger(__name__) diff --git a/posthog/tasks/stop_surveys_reached_target.py b/posthog/tasks/stop_surveys_reached_target.py index 5432a45d84b19..dc8f99ee3cff0 100644 --- a/posthog/tasks/stop_surveys_reached_target.py +++ b/posthog/tasks/stop_surveys_reached_target.py @@ -1,16 +1,16 @@ from itertools import groupby from django.db.models import Q from django.utils import timezone -from uuid import UUID from datetime import datetime from posthog.clickhouse.client.connection import Workload from posthog.client import sync_execute from posthog.models import Survey +from posthog.models.utils import UUIDT def _get_surveys_response_counts( - surveys_ids: list[UUID], team_id: int, earliest_survey_creation_date: datetime + surveys_ids: list[UUIDT], team_id: int, earliest_survey_creation_date: datetime ) -> dict[str, int]: data = sync_execute( """ diff --git a/posthog/tasks/tasks.py b/posthog/tasks/tasks.py index f2bf138177a35..902d9dd7df8e9 100644 --- a/posthog/tasks/tasks.py +++ b/posthog/tasks/tasks.py @@ 
-8,6 +8,7 @@ from django.utils import timezone from prometheus_client import Gauge from redis import Redis +import requests from structlog import get_logger from posthog.clickhouse.client.limit import limit_concurrency, CeleryConcurrencyLimitExceeded @@ -180,11 +181,7 @@ def pg_row_count() -> None: "log_entries", ] -HEARTBEAT_EVENT_TO_INGESTION_LAG_METRIC = { - "heartbeat": "ingestion", - "heartbeat_buffer": "ingestion_buffer", - "heartbeat_api": "ingestion_api", -} +HEARTBEAT_EVENT_TO_INGESTION_LAG_METRIC = {"$heartbeat": "ingestion_api"} @shared_task(ignore_result=True) @@ -192,9 +189,8 @@ def ingestion_lag() -> None: from statshog.defaults.django import statsd from posthog.client import sync_execute + from posthog.models.team.team import Team - # Requires https://github.com/PostHog/posthog-heartbeat-plugin to be enabled on team 2 - # Note that it runs every minute, and we compare it with now(), so there's up to 60s delay query = """ SELECT event, date_diff('second', max(timestamp), now()) FROM events @@ -204,11 +200,13 @@ def ingestion_lag() -> None: GROUP BY event """ + team_ids = settings.INGESTION_LAG_METRIC_TEAM_IDS + try: results = sync_execute( query, { - "team_ids": settings.INGESTION_LAG_METRIC_TEAM_IDS, + "team_ids": team_ids, "events": list(HEARTBEAT_EVENT_TO_INGESTION_LAG_METRIC.keys()), }, ) @@ -226,6 +224,17 @@ def ingestion_lag() -> None: except: pass + for team in Team.objects.filter(pk__in=team_ids): + requests.post( + settings.SITE_URL + "/e", + json={ + "event": "$heartbeat", + "distinct_id": "posthog-celery-heartbeat", + "token": team.api_token, + "properties": {"$timestamp": timezone.now().isoformat()}, + }, + ) + @shared_task(ignore_result=True, queue=CeleryQueue.SESSION_REPLAY_GENERAL.value) def replay_count_metrics() -> None: diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr index 1f6210fb4d933..a15676702daaa 100644 --- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -88,6 +88,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -349,6 +350,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/tasks/update_survey_iteration.py b/posthog/tasks/update_survey_iteration.py index 5218a99010252..2c6096b19261b 100644 --- a/posthog/tasks/update_survey_iteration.py +++ b/posthog/tasks/update_survey_iteration.py @@ -58,7 +58,7 @@ def _get_targeting_flag(survey: Survey) -> ForeignKey | ForeignKey | Any: team=survey.team, created_by=survey.created_by, active=True, - key=survey.id, + key=str(survey.id), filters=user_submitted_dismissed_filter, ) new_flag.save() diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py index 19658ec1a7b35..5fe76ff146649 100644 --- a/posthog/temporal/batch_exports/batch_exports.py +++ 
b/posthog/temporal/batch_exports/batch_exports.py @@ -412,7 +412,7 @@ async def start_batch_export_run(inputs: StartBatchExportRunInputs) -> BatchExpo (i.e. without running the insert activity), as there will be nothing to export. """ logger = await bind_temporal_worker_logger(team_id=inputs.team_id) - logger.info( + await logger.ainfo( "Starting batch export for range %s - %s", inputs.data_interval_start, inputs.data_interval_end, @@ -483,19 +483,19 @@ async def finish_batch_export_run(inputs: FinishBatchExportRunInputs) -> None: ) if batch_export_run.status == BatchExportRun.Status.FAILED_RETRYABLE: - logger.error("Batch export failed with error: %s", batch_export_run.latest_error) + await logger.aerror("Batch export failed with error: %s", batch_export_run.latest_error) elif batch_export_run.status == BatchExportRun.Status.FAILED: - logger.error("Batch export failed with non-recoverable error: %s", batch_export_run.latest_error) + await logger.aerror("Batch export failed with non-recoverable error: %s", batch_export_run.latest_error) from posthog.tasks.email import send_batch_export_run_failure try: await database_sync_to_async(send_batch_export_run_failure)(inputs.id) except Exception: - logger.exception("Failure email notification could not be sent") + await logger.aexception("Failure email notification could not be sent") else: - logger.info("Failure notification email for run %s has been sent", inputs.id) + await logger.ainfo("Failure notification email for run %s has been sent", inputs.id) is_over_failure_threshold = await check_if_over_failure_threshold( inputs.batch_export_id, @@ -512,10 +512,10 @@ async def finish_batch_export_run(inputs: FinishBatchExportRunInputs) -> None: # Pausing could error if the underlying schedule is deleted. # Our application logic should prevent that, but I want to log it in case it ever happens # as that would indicate a bug. - logger.exception("Batch export could not be automatically paused") + await logger.aexception("Batch export could not be automatically paused") else: if was_paused: - logger.warning( + await logger.awarning( "Batch export was automatically paused due to exceeding failure threshold and exhausting " "all automated retries." "The batch export can be unpaused after addressing any errors." @@ -526,10 +526,10 @@ async def finish_batch_export_run(inputs: FinishBatchExportRunInputs) -> None: inputs.batch_export_id, ) except Exception: - logger.exception("Ongoing backfills could not be automatically cancelled") + await logger.aexception("Ongoing backfills could not be automatically cancelled") else: if total_cancelled > 0: - logger.warning( + await logger.awarning( f"{total_cancelled} ongoing batch export backfill{'s' if total_cancelled > 1 else ''} " f"{'were' if total_cancelled > 1 else 'was'} cancelled due to exceeding failure threshold " " and exhausting all automated retries." @@ -537,10 +537,10 @@ async def finish_batch_export_run(inputs: FinishBatchExportRunInputs) -> None: ) elif batch_export_run.status == BatchExportRun.Status.CANCELLED: - logger.warning("Batch export was cancelled") + await logger.awarning("Batch export was cancelled") else: - logger.info( + await logger.ainfo( "Successfully finished exporting batch %s - %s", batch_export_run.data_interval_start, batch_export_run.data_interval_end, @@ -652,7 +652,7 @@ async def create_batch_export_backfill_model(inputs: CreateBatchExportBackfillIn model instance to represent them in our database. 
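+
+    Note: logging in this module now uses structlog's async methods (ainfo,
+    aerror, awarning, ...) so that emitting log records does not block the
+    worker's event loop.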
""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id) - logger.info( + await logger.ainfo( "Creating historical export for batches in range %s - %s", inputs.start_at, inputs.end_at, @@ -685,13 +685,13 @@ async def update_batch_export_backfill_model_status(inputs: UpdateBatchExportBac logger = await bind_temporal_worker_logger(team_id=backfill.team_id) if backfill.status in (BatchExportBackfill.Status.FAILED, BatchExportBackfill.Status.FAILED_RETRYABLE): - logger.error("Historical export failed") + await logger.aerror("Historical export failed") elif backfill.status == BatchExportBackfill.Status.CANCELLED: - logger.warning("Historical export was cancelled.") + await logger.awarning("Historical export was cancelled.") else: - logger.info( + await logger.ainfo( "Successfully finished exporting historical batches in %s - %s", backfill.start_at, backfill.end_at, diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index 3e80dd51bdf3d..9da8c89e56e53 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -328,7 +328,7 @@ def bigquery_default_fields() -> list[BatchExportField]: async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to BigQuery.""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="BigQuery") - logger.info( + await logger.ainfo( "Batch exporting range %s - %s to BigQuery: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, @@ -446,7 +446,7 @@ async def flush_to_bigquery( last: bool, error: Exception | None, ): - logger.debug( + await logger.adebug( "Loading %s records of size %s bytes", records_since_last_flush, bytes_since_last_flush, diff --git a/posthog/temporal/batch_exports/postgres_batch_export.py b/posthog/temporal/batch_exports/postgres_batch_export.py index 2fcb13b35d95a..89b4518940799 100644 --- a/posthog/temporal/batch_exports/postgres_batch_export.py +++ b/posthog/temporal/batch_exports/postgres_batch_export.py @@ -434,7 +434,7 @@ def get_postgres_fields_from_record_schema( async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to Postgres.""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="PostgreSQL") - logger.info( + await logger.ainfo( "Batch exporting range %s - %s to PostgreSQL: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, @@ -538,7 +538,7 @@ async def flush_to_postgres( last: bool, error: Exception | None, ): - logger.debug( + await logger.adebug( "Copying %s records of size %s bytes", records_since_last_flush, bytes_since_last_flush, diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py index 9cc083b7fe3c3..8fff641bb3620 100644 --- a/posthog/temporal/batch_exports/redshift_batch_export.py +++ b/posthog/temporal/batch_exports/redshift_batch_export.py @@ -366,7 +366,7 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> Records fields. 
""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Redshift") - logger.info( + await logger.ainfo( "Batch exporting range %s - %s to Redshift: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index 95d0d32f892fe..076fcd5cf773e 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -441,7 +441,7 @@ async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tupl except IndexError: # This is the error we expect when no details as the sequence will be empty. interval_start = inputs.data_interval_start - logger.debug( + await logger.adebug( "Did not receive details from previous activity Execution. Export will start from the beginning %s", interval_start, ) @@ -449,12 +449,12 @@ async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tupl # We still start from the beginning, but we make a point to log unexpected errors. # Ideally, any new exceptions should be added to the previous block after the first time and we will never land here. interval_start = inputs.data_interval_start - logger.warning( + await logger.awarning( "Did not receive details from previous activity Execution due to an unexpected error. Export will start from the beginning %s", interval_start, ) else: - logger.info( + await logger.ainfo( "Received details from previous activity. Export will attempt to resume from %s", interval_start, ) @@ -464,7 +464,7 @@ async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tupl # Even if we receive details we cannot resume a brotli compressed upload as we have lost the compressor state. interval_start = inputs.data_interval_start - logger.info( + await logger.ainfo( f"Export will start from the beginning as we are using brotli compression: %s", interval_start, ) @@ -502,7 +502,7 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted: files. """ logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="S3") - logger.info( + await logger.ainfo( "Batch exporting range %s - %s to S3: %s", inputs.data_interval_start, inputs.data_interval_end, @@ -557,14 +557,14 @@ async def flush_to_s3( error: Exception | None, ): if error is not None: - logger.debug("Error while writing part %d", s3_upload.part_number + 1, exc_info=error) - logger.warn( + await logger.adebug("Error while writing part %d", s3_upload.part_number + 1, exc_info=error) + await logger.awarn( "An error was detected while writing part %d. Partial part will not be uploaded in case it can be retried.", s3_upload.part_number + 1, ) return - logger.debug( + await logger.adebug( "Uploading %s part %s containing %s records with size %s bytes", "last " if last else "", s3_upload.part_number + 1, diff --git a/posthog/temporal/batch_exports/snowflake_batch_export.py b/posthog/temporal/batch_exports/snowflake_batch_export.py index ad0afb833e176..82408017fc326 100644 --- a/posthog/temporal/batch_exports/snowflake_batch_export.py +++ b/posthog/temporal/batch_exports/snowflake_batch_export.py @@ -540,7 +540,7 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> Recor TODO: We're using JSON here, it's not the most efficient way to do this. 
""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Snowflake") - logger.info( + await logger.ainfo( "Batch exporting range %s - %s to Snowflake: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, diff --git a/posthog/temporal/common/utils.py b/posthog/temporal/common/utils.py index e20062db9cc23..fc8a77cadea81 100644 --- a/posthog/temporal/common/utils.py +++ b/posthog/temporal/common/utils.py @@ -1,5 +1,5 @@ -import collections.abc import abc +import collections.abc import dataclasses import datetime as dt import typing @@ -126,23 +126,23 @@ async def should_resume_from_activity_heartbeat( except NotEnoughHeartbeatValuesError: heartbeat_details = None received = False - logger.warning("Details from previous activity execution did not contain the expected amount of values") + await logger.awarning("Details from previous activity execution did not contain the expected amount of values") except HeartbeatParseError: heartbeat_details = None received = False - logger.warning("Details from previous activity execution could not be parsed.") + await logger.awarning("Details from previous activity execution could not be parsed.") except Exception: # We should start from the beginning, but we make a point to log unexpected errors. # Ideally, any new exceptions should be added to the previous blocks after the first time and we will never land here. heartbeat_details = None received = False - logger.exception("Did not receive details from previous activity Execution due to an unexpected error") + await logger.aexception("Did not receive details from previous activity Execution due to an unexpected error") else: received = True - logger.debug( + await logger.adebug( f"Received details from previous activity: {heartbeat_details}", ) diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py index 0dbfd8f5c3fd7..03e57174f0527 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline.py +++ b/posthog/temporal/data_imports/pipelines/pipeline.py @@ -96,9 +96,7 @@ def _create_pipeline(self): destination = self._get_destination() return dlt.pipeline( - pipeline_name=pipeline_name, - destination=destination, - dataset_name=self.inputs.dataset_name, + pipeline_name=pipeline_name, destination=destination, dataset_name=self.inputs.dataset_name, progress="log" ) async def _prepare_s3_files_for_querying(self, file_uris: list[str]): @@ -254,7 +252,7 @@ def _run(self) -> dict[str, int]: else: self.logger.info("No table_counts, skipping validate_schema_and_update_table") - # Delete local state from the file system + # Cleanup: delete local state from the file system pipeline.drop() return dict(total_counts) diff --git a/posthog/temporal/data_imports/pipelines/schemas.py b/posthog/temporal/data_imports/pipelines/schemas.py index 0acd00e8bd6f3..15214f939b78a 100644 --- a/posthog/temporal/data_imports/pipelines/schemas.py +++ b/posthog/temporal/data_imports/pipelines/schemas.py @@ -17,6 +17,11 @@ INCREMENTAL_ENDPOINTS as SALESFORCE_INCREMENTAL_ENDPOINTS, INCREMENTAL_FIELDS as SALESFORCE_INCREMENTAL_FIELDS, ) +from posthog.temporal.data_imports.pipelines.vitally.settings import ( + ENDPOINTS as VITALLY_ENDPOINTS, + INCREMENTAL_ENDPOINTS as VITALLY_INCREMENTAL_ENDPOINTS, + INCREMENTAL_FIELDS as VITALLY_INCREMENTAL_FIELDS, +) PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING = { ExternalDataSource.Type.STRIPE: STRIPE_ENDPOINTS, @@ -29,6 +34,7 @@ ExternalDataSource.Type.SALESFORCE: SALESFORCE_ENDPOINTS, 
ExternalDataSource.Type.MYSQL: (), ExternalDataSource.Type.MSSQL: (), + ExternalDataSource.Type.VITALLY: VITALLY_ENDPOINTS, } PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING = { @@ -40,6 +46,7 @@ ExternalDataSource.Type.SALESFORCE: SALESFORCE_INCREMENTAL_ENDPOINTS, ExternalDataSource.Type.MYSQL: (), ExternalDataSource.Type.MSSQL: (), + ExternalDataSource.Type.VITALLY: VITALLY_INCREMENTAL_ENDPOINTS, } PIPELINE_TYPE_INCREMENTAL_FIELDS_MAPPING: dict[ExternalDataSource.Type, dict[str, list[IncrementalField]]] = { @@ -51,4 +58,5 @@ ExternalDataSource.Type.SALESFORCE: SALESFORCE_INCREMENTAL_FIELDS, ExternalDataSource.Type.MYSQL: {}, ExternalDataSource.Type.MSSQL: {}, + ExternalDataSource.Type.VITALLY: VITALLY_INCREMENTAL_FIELDS, } diff --git a/posthog/temporal/data_imports/pipelines/vitally/__init__.py b/posthog/temporal/data_imports/pipelines/vitally/__init__.py new file mode 100644 index 0000000000000..8c526017db38d --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/vitally/__init__.py @@ -0,0 +1,360 @@ +import base64 +from dateutil import parser +from typing import Any, Optional +import dlt +from dlt.sources.helpers.rest_client.paginators import BasePaginator +from dlt.sources.helpers.requests import Response, Request +import requests +from posthog.temporal.data_imports.pipelines.rest_source import RESTAPIConfig, rest_api_resources +from posthog.temporal.data_imports.pipelines.rest_source.typing import EndpointResource + + +def get_resource(name: str, is_incremental: bool) -> EndpointResource: + resources: dict[str, EndpointResource] = { + "Organizations": { + "name": "Organizations", + "table_name": "organizations", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/organizations", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Accounts": { + "name": "Accounts", + "table_name": "accounts", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/accounts", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Users": { + "name": "Users", + "table_name": "users", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/users", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Conversations": { + "name": "Conversations", + "table_name": "conversations", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + 
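+            # merge/upsert deduplicates re-fetched rows by primary_key on incremental syncs; without an incremental cursor the table is fully rewritten ("replace")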
else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/conversations", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Notes": { + "name": "Notes", + "table_name": "notes", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/notes", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Projects": { + "name": "Projects", + "table_name": "projects", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/projects", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Tasks": { + "name": "Tasks", + "table_name": "tasks", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/tasks", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "NPS_Responses": { + "name": "NPS_Responses", + "table_name": "nps_responses", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/npsResponses", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Custom_Objects": { + "name": "Custom_Objects", + "table_name": "custom_objects", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/customObjects", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + } + + return resources[name] + + +class VitallyPaginator(BasePaginator): + def __init__(self) -> None: + super().__init__() + + def update_state(self, response: Response, 
data: Optional[list[Any]] = None) -> None: + res = response.json() + + current_source = dlt.current.get_source() + resources = current_source.resources + current_resource = next(iter(resources.values())) + incremental = current_resource.incremental.incremental + + self._cursor = None + + if not res: + self._has_next_page = False + return + + if incremental: + updated_at_str = res["results"][0]["updatedAt"] + updated_at = parser.parse(updated_at_str).timestamp() + start_value = parser.parse(incremental.start_value).timestamp() + + if start_value >= updated_at: + self._has_next_page = False + return + + if res["next"]: + self._has_next_page = True + self._cursor = res["next"] + else: + self._has_next_page = False + + def update_request(self, request: Request) -> None: + if request.params is None: + request.params = {} + + request.params["from"] = self._cursor + + +def get_base_url(region: str, subdomain: Optional[str]) -> str: + if region == "US" and subdomain: + return f"https://{subdomain}.rest.vitally.io/" + + return "https://rest.vitally-eu.io/" + + +@dlt.source(max_table_nesting=0) +def vitally_source( + secret_token: str, + region: str, + subdomain: Optional[str], + endpoint: str, + team_id: int, + job_id: str, + is_incremental: bool = False, +): + config: RESTAPIConfig = { + "client": { + "base_url": get_base_url(region, subdomain), + "auth": { + "type": "http_basic", + "username": secret_token, + "password": "", + }, + "paginator": VitallyPaginator(), + }, + "resource_defaults": { + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + }, + "resources": [get_resource(endpoint, is_incremental)], + } + + yield from rest_api_resources(config, team_id, job_id) + + +def validate_credentials(secret_token: str, region: str, subdomain: Optional[str]) -> bool: + basic_token = base64.b64encode(f"{secret_token}:".encode("ascii")).decode("ascii") + res = requests.get( + f"{get_base_url(region, subdomain)}resources/users?limit=1", + headers={"Authorization": f"Basic {basic_token}"}, + ) + + return res.status_code == 200 diff --git a/posthog/temporal/data_imports/pipelines/vitally/settings.py b/posthog/temporal/data_imports/pipelines/vitally/settings.py new file mode 100644 index 0000000000000..a16d9565f5d1c --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/vitally/settings.py @@ -0,0 +1,108 @@ +from posthog.warehouse.types import IncrementalField, IncrementalFieldType + +ENDPOINTS = ( + "Organizations", + "Accounts", + "Users", + "Conversations", + "Notes", + "Projects", + "Tasks", + "NPS_Responses", + "Custom_Objects", +) + +INCREMENTAL_ENDPOINTS = ( + "Organizations", + "Accounts", + "Users", + "Conversations", + "Notes", + "Projects", + "Tasks", + "NPS_Responses", + "Custom_Objects", +) + +INCREMENTAL_FIELDS: dict[str, list[IncrementalField]] = { + "Organizations": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Accounts": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Users": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Conversations": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, 
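+            # every Vitally resource shares this shape: the API's updatedAt cursor surfaces as a single updated_at DateTime incremental field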
+ } + ], + "Notes": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Projects": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Tasks": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "NPS_Responses": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Custom_Objects": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], +} diff --git a/posthog/temporal/data_imports/workflow_activities/import_data.py b/posthog/temporal/data_imports/workflow_activities/import_data.py index 6ce4237f53711..73706e1191589 100644 --- a/posthog/temporal/data_imports/workflow_activities/import_data.py +++ b/posthog/temporal/data_imports/workflow_activities/import_data.py @@ -283,6 +283,27 @@ async def import_data_activity(inputs: ImportDataActivityInputs): is_incremental=schema.is_incremental, ) + return await _run( + job_inputs=job_inputs, + source=source, + logger=logger, + inputs=inputs, + schema=schema, + reset_pipeline=reset_pipeline, + ) + elif model.pipeline.source_type == ExternalDataSource.Type.VITALLY: + from posthog.temporal.data_imports.pipelines.vitally import vitally_source + + source = vitally_source( + secret_token=model.pipeline.job_inputs.get("secret_token"), + region=model.pipeline.job_inputs.get("region"), + subdomain=model.pipeline.job_inputs.get("subdomain"), + endpoint=schema.name, + team_id=inputs.team_id, + job_id=inputs.run_id, + is_incremental=schema.is_incremental, + ) + return await _run( job_inputs=job_inputs, source=source, diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index 71db78a074e14..3865a9cce9ff6 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -141,6 +141,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -310,6 +311,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", @@ -643,6 +645,7 @@ "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", "posthog_team"."autocapture_exceptions_opt_in", "posthog_team"."autocapture_exceptions_errors_to_ignore", "posthog_team"."session_recording_opt_in", diff --git a/posthog/test/test_team.py b/posthog/test/test_team.py index
1a54a17dd0e41..076fc21e5fe34 100644 --- a/posthog/test/test_team.py +++ b/posthog/test/test_team.py @@ -72,6 +72,7 @@ def test_team_has_expected_defaults(self): self.assertEqual(team.data_attributes, ["data-attr"]) self.assertEqual(team.autocapture_exceptions_opt_in, None) self.assertEqual(team.autocapture_web_vitals_opt_in, None) + self.assertEqual(team.autocapture_web_vitals_allowed_metrics, None) self.assertEqual(team.autocapture_exceptions_errors_to_ignore, None) def test_create_team_with_test_account_filters(self): diff --git a/posthog/test/user_scripts/__init__.py b/posthog/test/user_scripts/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/posthog/test/user_scripts/test_aggregate_funnel.py b/posthog/test/user_scripts/test_aggregate_funnel.py new file mode 100644 index 0000000000000..2b20929002f83 --- /dev/null +++ b/posthog/test/user_scripts/test_aggregate_funnel.py @@ -0,0 +1,4206 @@ +from typing import Any + +from posthog.user_scripts.aggregate_funnel import calculate_funnel_from_user_events + + +def test(): + y = [ + [(1577973600.0, "", [1]), (1577980800.0, "", [2]), (1577984400.0, "", [3])], + [(1577880000.0, "", [1]), (1577883600.0, "", [2]), (1577890800.0, "", [3])], + [(1577973600.0, "", [1]), (1577980800.0, "", [2])], + ] + + for x in y: + calculate_funnel_from_user_events(3, 1209600, "first_touch", "strict", [""], x) + + +def test2(): + a: list[Any] = [ + [(1720051532.484019, [], [1, 2, 3, 4, 5, 6])], + [(1720105713.331995, [], [1, 2, 3, 4, 5, 6])], + [(1720329565.847159, [], [1, 2, 3, 4, 5, 6])], + [ + (1720186008.567886, [], [1, 2, 3, 4, 5, 6]), + (1720326697.522923, [], [1, 2, 3, 4, 5, 6]), + (1720482974.426314, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720327526.250804, [], [1, 2, 3, 4, 5, 6]), (1720497558.23414, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979738.339271, [], [1, 2, 3, 4, 5, 6]), + (1720025384.961105, [], [1, 2, 3, 4, 5, 6]), + (1720504618.55439, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720136408.619257, [], [1, 2, 3, 4, 5, 6]), + (1720136458.666712, [], [1, 2, 3, 4, 5, 6]), + (1720136460.776795, [], [1, 2, 3, 4, 5, 6]), + (1720136463.761667, [], [1, 2, 3, 4, 5, 6]), + (1720136465.813823, [], [1, 2, 3, 4, 5, 6]), + (1720153490.167176, [], [1, 2, 3, 4, 5, 6]), + (1720153611.687424, [], [1, 2, 3, 4, 5, 6]), + (1720153613.813758, [], [1, 2, 3, 4, 5, 6]), + (1720221238.819741, [], [1, 2, 3, 4, 5, 6]), + (1720221389.412602, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720234125.717526, [], [1, 2, 3, 4, 5, 6])], + [ + (1720245095.229565, [], [1, 2, 3, 4, 5, 6]), + (1720396821.910578, [], [1, 2, 3, 4, 5, 6]), + (1720502554.801179, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720237286.585886, [], [1, 2, 3, 4, 5, 6]), (1720492842.0014, [], [1, 2, 3, 4, 5, 6])], + [(1720145259.463577, [], [1, 2, 3, 4, 5, 6])], + [(1720173037.951133, [], [1, 2, 3, 4, 5, 6]), (1720398629.834351, [], [1, 2, 3, 4, 5, 6])], + [(1720576515.470242, [], [1, 2, 3, 4, 5, 6])], + [(1720488634.248776, [], [1, 2, 3, 4, 5, 6])], + [ + (1719966672.507604, [], [1, 2, 3, 4, 5, 6]), + (1720379305.230415, [], [1, 2, 3, 4, 5, 6]), + (1720485725.30467, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720056848.984567, [], [1, 2, 3, 4, 5, 6]), + (1720234634.97164, [], [1, 2, 3, 4, 5, 6]), + (1720326372.083307, [], [1, 2, 3, 4, 5, 6]), + (1720487169.130815, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719979630.05111, [], [1, 2, 3, 4, 5, 6])], + [(1720067082.599895, [], [1, 2, 3, 4, 5, 6])], + [(1720065455.678956, [], [1, 2, 3, 4, 5, 6])], + [(1720141594.235645, [], [1, 2, 3, 4, 5, 6]), (1720479638.868071, [], [1, 2, 3, 4, 5, 
6])], + [(1720172558.775714, [], [1, 2, 3, 4, 5, 6]), (1720589944.987293, [], [1, 2, 3, 4, 5, 6])], + [(1720240665.403432, [], [1, 2, 3, 4, 5, 6]), (1720403456.771406, [], [1, 2, 3, 4, 5, 6])], + [ + (1720151433.593775, [], [1, 2, 3, 4, 5, 6]), + (1720397705.729741, [], [1, 2, 3, 4, 5, 6]), + (1720407937.654196, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720063019.413544, [], [1, 2, 3, 4, 5, 6]), + (1720230670.007217, [], [1, 2, 3, 4, 5, 6]), + (1720572529.432945, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720062676.566511, [], [1, 2, 3, 4, 5, 6]), + (1720062768.411832, [], [1, 2, 3, 4, 5, 6]), + (1720062770.476807, [], [1, 2, 3, 4, 5, 6]), + (1720062771.394614, [], [1, 2, 3, 4, 5, 6]), + (1720156065.434007, [], [1, 2, 3, 4, 5, 6]), + (1720156180.339675, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720053274.311851, [], [1, 2, 3, 4, 5, 6]), (1720574916.370766, [], [1, 2, 3, 4, 5, 6])], + [(1720403600.103166, [], [1, 2, 3, 4, 5, 6])], + [(1720070524.509752, [], [1, 2, 3, 4, 5, 6]), (1720330735.128105, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980823.099161, [], [1, 2, 3, 4, 5, 6]), + (1720109783.667678, [], [1, 2, 3, 4, 5, 6]), + (1720488536.75761, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720120539.020908, [], [1, 2, 3, 4, 5, 6]), + (1720235556.263511, [], [1, 2, 3, 4, 5, 6]), + (1720404531.8727, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720461710.602542, [], [1, 2, 3, 4, 5, 6])], + [(1720142147.27027, [], [1, 2, 3, 4, 5, 6]), (1720463509.177443, [], [1, 2, 3, 4, 5, 6])], + [(1720609249.094945, [], [1, 2, 3, 4, 5, 6])], + [ + (1720061653.09558, [], [1, 2, 3, 4, 5, 6]), + (1720331923.364924, [], [1, 2, 3, 4, 5, 6]), + (1720493879.336969, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719981455.944035, [], [1, 2, 3, 4, 5, 6]), + (1719981517.705732, [], [1, 2, 3, 4, 5, 6]), + (1719994503.81365, [], [1, 2, 3, 4, 5, 6]), + (1719994621.6397, [], [1, 2, 3, 4, 5, 6]), + (1719994623.698368, [], [1, 2, 3, 4, 5, 6]), + (1719994627.578717, [], [1, 2, 3, 4, 5, 6]), + (1719994629.663136, [], [1, 2, 3, 4, 5, 6]), + (1719994631.068061, [], [1, 2, 3, 4, 5, 6]), + (1719994633.142381, [], [1, 2, 3, 4, 5, 6]), + (1720027463.767433, [], [1, 2, 3, 4, 5, 6]), + (1720027502.563106, [], [1, 2, 3, 4, 5, 6]), + (1720027504.670674, [], [1, 2, 3, 4, 5, 6]), + (1720057341.723675, [], [1, 2, 3, 4, 5, 6]), + (1720057343.781939, [], [1, 2, 3, 4, 5, 6]), + (1720145087.601179, [], [1, 2, 3, 4, 5, 6]), + (1720145089.680587, [], [1, 2, 3, 4, 5, 6]), + (1720243008.749524, [], [1, 2, 3, 4, 5, 6]), + (1720243068.439551, [], [1, 2, 3, 4, 5, 6]), + (1720318425.097956, [], [1, 2, 3, 4, 5, 6]), + (1720318427.16319, [], [1, 2, 3, 4, 5, 6]), + (1720318432.221956, [], [1, 2, 3, 4, 5, 6]), + (1720318434.329525, [], [1, 2, 3, 4, 5, 6]), + (1720418148.778433, [], [1, 2, 3, 4, 5, 6]), + (1720418150.861104, [], [1, 2, 3, 4, 5, 6]), + (1720488202.399436, [], [1, 2, 3, 4, 5, 6]), + (1720488212.260625, [], [1, 2, 3, 4, 5, 6]), + (1720488214.365566, [], [1, 2, 3, 4, 5, 6]), + (1720572393.815712, [], [1, 2, 3, 4, 5, 6]), + (1720613041.916708, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720057097.342555, [], [1, 2, 3, 4, 5, 6]), + (1720317039.904735, [], [1, 2, 3, 4, 5, 6]), + (1720483178.967836, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720181661.187285, [], [1, 2, 3, 4, 5, 6])], + [(1720199552.174104, [], [1, 2, 3, 4, 5, 6]), (1720568803.062761, [], [1, 2, 3, 4, 5, 6])], + [ + (1720247391.136136, [], [1, 2, 3, 4, 5, 6]), + (1720410696.088339, [], [1, 2, 3, 4, 5, 6]), + (1720599399.171422, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720312357.61565, [], [1, 2, 3, 4, 5, 6])], + [(1720052008.103911, [], [1, 2, 3, 
4, 5, 6]), (1720400141.042944, [], [1, 2, 3, 4, 5, 6])], + [(1720210751.331903, [], [1, 2, 3, 4, 5, 6]), (1720503558.839248, [], [1, 2, 3, 4, 5, 6])], + [ + (1720241352.747626, [], [1, 2, 3, 4, 5, 6]), + (1720321677.766712, [], [1, 2, 3, 4, 5, 6]), + (1720409706.122052, [], [1, 2, 3, 4, 5, 6]), + (1720519728.980875, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977467.931514, [], [1, 2, 3, 4, 5, 6]), (1720568695.132969, [], [1, 2, 3, 4, 5, 6])], + [(1720071302.148667, [], [1, 2, 3, 4, 5, 6]), (1720238096.092618, [], [1, 2, 3, 4, 5, 6])], + [(1720057437.769059, [], [1, 2, 3, 4, 5, 6])], + [ + (1720221473.506037, [], [1, 2, 3, 4, 5, 6]), + (1720348129.55283, [], [1, 2, 3, 4, 5, 6]), + (1720482938.000889, [], [1, 2, 3, 4, 5, 6]), + (1720576755.035308, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720235902.362301, [], [1, 2, 3, 4, 5, 6])], + [(1720024782.723245, [], [1, 2, 3, 4, 5, 6])], + [(1720070158.75827, [], [1, 2, 3, 4, 5, 6])], + [ + (1720000651.858702, [], [1, 2, 3, 4, 5, 6]), + (1720244645.395695, [], [1, 2, 3, 4, 5, 6]), + (1720411107.259775, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720191076.938109, [], [1, 2, 3, 4, 5, 6]), + (1720322967.081356, [], [1, 2, 3, 4, 5, 6]), + (1720323158.146239, [], [1, 2, 3, 4, 5, 6]), + (1720323172.234517, [], [1, 2, 3, 4, 5, 6]), + (1720323206.302768, [], [1, 2, 3, 4, 5, 6]), + (1720323313.146535, [], [1, 2, 3, 4, 5, 6]), + (1720323364.511129, [], [1, 2, 3, 4, 5, 6]), + (1720323458.282407, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720065260.493236, [], [1, 2, 3, 4, 5, 6]), + (1720065334.608797, [], [1, 2, 3, 4, 5, 6]), + (1720141650.234015, [], [1, 2, 3, 4, 5, 6]), + (1720141749.547675, [], [1, 2, 3, 4, 5, 6]), + (1720141751.641012, [], [1, 2, 3, 4, 5, 6]), + (1720154278.705276, [], [1, 2, 3, 4, 5, 6]), + (1720154280.760532, [], [1, 2, 3, 4, 5, 6]), + (1720229499.077048, [], [1, 2, 3, 4, 5, 6]), + (1720229572.436301, [], [1, 2, 3, 4, 5, 6]), + (1720259010.216367, [], [1, 2, 3, 4, 5, 6]), + (1720259234.335094, [], [1, 2, 3, 4, 5, 6]), + (1720259236.42606, [], [1, 2, 3, 4, 5, 6]), + (1720318686.64822, [], [1, 2, 3, 4, 5, 6]), + (1720318843.45613, [], [1, 2, 3, 4, 5, 6]), + (1720318845.509738, [], [1, 2, 3, 4, 5, 6]), + (1720363113.918907, [], [1, 2, 3, 4, 5, 6]), + (1720363184.856665, [], [1, 2, 3, 4, 5, 6]), + (1720400947.604003, [], [1, 2, 3, 4, 5, 6]), + (1720400949.633637, [], [1, 2, 3, 4, 5, 6]), + (1720498232.720406, [], [1, 2, 3, 4, 5, 6]), + (1720498253.802808, [], [1, 2, 3, 4, 5, 6]), + (1720498255.908508, [], [1, 2, 3, 4, 5, 6]), + (1720586991.26782, [], [1, 2, 3, 4, 5, 6]), + (1720587059.251675, [], [1, 2, 3, 4, 5, 6]), + (1720587061.383312, [], [1, 2, 3, 4, 5, 6]), + (1720638042.876812, [], [1, 2, 3, 4, 5, 6]), + (1720638133.182092, [], [1, 2, 3, 4, 5, 6]), + (1720638135.286491, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978486.488845, [], [1, 2, 3, 4, 5, 6]), (1720406010.994165, [], [1, 2, 3, 4, 5, 6])], + [(1720062931.526777, [], [1, 2, 3, 4, 5, 6])], + [ + (1720142330.725196, [], [1, 2, 3, 4, 5, 6]), + (1720238332.287607, [], [1, 2, 3, 4, 5, 6]), + (1720404745.279674, [], [1, 2, 3, 4, 5, 6]), + (1720577388.350881, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719973721.653682, [], [1, 2, 3, 4, 5, 6]), + (1720045556.714061, [], [1, 2, 3, 4, 5, 6]), + (1720286335.062706, [], [1, 2, 3, 4, 5, 6]), + (1720408637.593505, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719980831.540691, [], [1, 2, 3, 4, 5, 6]), + (1719980890.3872, [], [1, 2, 3, 4, 5, 6]), + (1719980892.464391, [], [1, 2, 3, 4, 5, 6]), + (1720027957.165729, [], [1, 2, 3, 4, 5, 6]), + (1720027959.212697, [], [1, 2, 3, 4, 5, 
6]), + (1720055928.682589, [], [1, 2, 3, 4, 5, 6]), + (1720055930.747743, [], [1, 2, 3, 4, 5, 6]), + (1720138782.333308, [], [1, 2, 3, 4, 5, 6]), + (1720138842.547168, [], [1, 2, 3, 4, 5, 6]), + (1720138844.667335, [], [1, 2, 3, 4, 5, 6]), + (1720138846.225705, [], [1, 2, 3, 4, 5, 6]), + (1720153595.409537, [], [1, 2, 3, 4, 5, 6]), + (1720153694.792152, [], [1, 2, 3, 4, 5, 6]), + (1720222583.234486, [], [1, 2, 3, 4, 5, 6]), + (1720222651.732326, [], [1, 2, 3, 4, 5, 6]), + (1720222653.840022, [], [1, 2, 3, 4, 5, 6]), + (1720231129.338916, [], [1, 2, 3, 4, 5, 6]), + (1720231262.508465, [], [1, 2, 3, 4, 5, 6]), + (1720315761.130281, [], [1, 2, 3, 4, 5, 6]), + (1720315844.746953, [], [1, 2, 3, 4, 5, 6]), + (1720315846.831435, [], [1, 2, 3, 4, 5, 6]), + (1720406873.849957, [], [1, 2, 3, 4, 5, 6]), + (1720406879.412626, [], [1, 2, 3, 4, 5, 6]), + (1720485467.197531, [], [1, 2, 3, 4, 5, 6]), + (1720485486.733099, [], [1, 2, 3, 4, 5, 6]), + (1720485488.847143, [], [1, 2, 3, 4, 5, 6]), + (1720485492.354688, [], [1, 2, 3, 4, 5, 6]), + (1720485494.434006, [], [1, 2, 3, 4, 5, 6]), + (1720581292.87898, [], [1, 2, 3, 4, 5, 6]), + (1720581372.990683, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965396.997192, [], [1, 2, 3, 4, 5, 6]), + (1720109840.37035, [], [1, 2, 3, 4, 5, 6]), + (1720224849.338664, [], [1, 2, 3, 4, 5, 6]), + (1720311680.960628, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720593841.069028, [], [1, 2, 3, 4, 5, 6])], + [(1720193711.631887, [], [1, 2, 3, 4, 5, 6]), (1720355386.424798, [], [1, 2, 3, 4, 5, 6])], + [(1720137394.637585, [], [1, 2, 3, 4, 5, 6]), (1720227526.549035, [], [1, 2, 3, 4, 5, 6])], + [(1720601724.604091, [], [1, 2, 3, 4, 5, 6])], + [(1720242114.286726, [], [1, 2, 3, 4, 5, 6]), (1720495287.866943, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984060.976083, [], [1, 2, 3, 4, 5, 6]), + (1720233353.478142, [], [1, 2, 3, 4, 5, 6]), + (1720331822.027661, [], [1, 2, 3, 4, 5, 6]), + (1720499420.953642, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720327908.649598, [], [1, 2, 3, 4, 5, 6]), + (1720327957.004146, [], [1, 2, 3, 4, 5, 6]), + (1720328002.921775, [], [1, 2, 3, 4, 5, 6]), + (1720328054.34555, [], [1, 2, 3, 4, 5, 6]), + (1720394578.210396, [], [1, 2, 3, 4, 5, 6]), + (1720394668.213374, [], [1, 2, 3, 4, 5, 6]), + (1720394670.323029, [], [1, 2, 3, 4, 5, 6]), + (1720410358.68385, [], [1, 2, 3, 4, 5, 6]), + (1720410430.047079, [], [1, 2, 3, 4, 5, 6]), + (1720410432.093006, [], [1, 2, 3, 4, 5, 6]), + (1720485479.982584, [], [1, 2, 3, 4, 5, 6]), + (1720485552.035405, [], [1, 2, 3, 4, 5, 6]), + (1720485554.099771, [], [1, 2, 3, 4, 5, 6]), + (1720576265.461408, [], [1, 2, 3, 4, 5, 6]), + (1720576267.553332, [], [1, 2, 3, 4, 5, 6]), + (1720580196.882833, [], [1, 2, 3, 4, 5, 6]), + (1720580198.938581, [], [1, 2, 3, 4, 5, 6]), + (1720580201.66793, [], [1, 2, 3, 4, 5, 6]), + (1720580203.765767, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720512918.56325, [], [1, 2, 3, 4, 5, 6])], + [(1720587573.354151, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059581.380866, [], [1, 2, 3, 4, 5, 6]), + (1720226059.821101, [], [1, 2, 3, 4, 5, 6]), + (1720569936.860231, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720483812.243251, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047890.599986, [], [1, 2, 3, 4, 5, 6]), + (1720151933.610926, [], [1, 2, 3, 4, 5, 6]), + (1720426395.237753, [], [1, 2, 3, 4, 5, 6]), + (1720589584.479646, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720050613.849179, [], [1, 2, 3, 4, 5, 6]), + (1720183728.480776, [], [1, 2, 3, 4, 5, 6]), + (1720245305.222942, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974717.393306, [], [1, 2, 3, 4, 5, 6]), + 
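+            # each row is (unix timestamp, breakdown value, matched funnel step indices) for one event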
(1720238913.058213, [], [1, 2, 3, 4, 5, 6]), + (1720403863.202175, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720066616.245506, [], [1, 2, 3, 4, 5, 6]), (1720329802.077257, [], [1, 2, 3, 4, 5, 6])], + [(1720058023.220462, [], [1, 2, 3, 4, 5, 6]), (1720273250.296181, [], [1, 2, 3, 4, 5, 6])], + [(1720400521.741834, [], [1, 2, 3, 4, 5, 6])], + [(1720590006.799829, [], [1, 2, 3, 4, 5, 6])], + [ + (1719977522.311193, [], [1, 2, 3, 4, 5, 6]), + (1720394307.490994, [], [1, 2, 3, 4, 5, 6]), + (1720541599.758133, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720059720.64277, [], [1, 2, 3, 4, 5, 6]), + (1720225557.277258, [], [1, 2, 3, 4, 5, 6]), + (1720318879.528985, [], [1, 2, 3, 4, 5, 6]), + (1720448939.738279, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720159019.782951, [], [1, 2, 3, 4, 5, 6]), + (1720232688.231366, [], [1, 2, 3, 4, 5, 6]), + (1720312031.934367, [], [1, 2, 3, 4, 5, 6]), + (1720420263.292336, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720051467.327131, [], [1, 2, 3, 4, 5, 6]), + (1720226107.259649, [], [1, 2, 3, 4, 5, 6]), + (1720410027.350582, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719966308.30787, [], [1, 2, 3, 4, 5, 6]), + (1720136775.382126, [], [1, 2, 3, 4, 5, 6]), + (1720453167.302523, [], [1, 2, 3, 4, 5, 6]), + (1720578911.142536, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965705.478301, [], [1, 2, 3, 4, 5, 6]), + (1720089492.274268, [], [1, 2, 3, 4, 5, 6]), + (1720458943.365803, [], [1, 2, 3, 4, 5, 6]), + (1720588170.374851, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720137202.259506, [], [1, 2, 3, 4, 5, 6]), (1720309242.730837, [], [1, 2, 3, 4, 5, 6])], + [(1720397209.557207, [], [1, 2, 3, 4, 5, 6]), (1720628958.303298, [], [1, 2, 3, 4, 5, 6])], + [(1719967303.936898, [], [1, 2, 3, 4, 5, 6]), (1720069496.922345, [], [1, 2, 3, 4, 5, 6])], + [ + (1719970996.586184, [], [1, 2, 3, 4, 5, 6]), + (1720168525.715398, [], [1, 2, 3, 4, 5, 6]), + (1720504098.515479, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720601826.09111, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971199.057468, [], [1, 2, 3, 4, 5, 6]), + (1720068258.0151, [], [1, 2, 3, 4, 5, 6]), + (1720137337.044491, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719971421.175757, [], [1, 2, 3, 4, 5, 6]), (1720134959.956933, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976305.13246, [], [1, 2, 3, 4, 5, 6]), + (1720059646.658845, [], [1, 2, 3, 4, 5, 6]), + (1720145964.773181, [], [1, 2, 3, 4, 5, 6]), + (1720233116.664838, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720436748.603567, [], [1, 2, 3, 4, 5, 6])], + [(1719969550.144929, [], [1, 2, 3, 4, 5, 6]), (1720315269.690666, [], [1, 2, 3, 4, 5, 6])], + [ + (1720274096.315691, [], [1, 2, 3, 4, 5, 6]), + (1720274154.981534, [], [1, 2, 3, 4, 5, 6]), + (1720274184.028094, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720268306.026574, [], [1, 2, 3, 4, 5, 6]), (1720323182.163554, [], [1, 2, 3, 4, 5, 6])], + [ + (1720032173.053995, [], [1, 2, 3, 4, 5, 6]), + (1720157155.365383, [], [1, 2, 3, 4, 5, 6]), + (1720314424.94755, [], [1, 2, 3, 4, 5, 6]), + (1720481047.114281, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720010572.095008, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968638.302149, [], [1, 2, 3, 4, 5, 6]), + (1719968762.98274, [], [1, 2, 3, 4, 5, 6]), + (1719968765.072701, [], [1, 2, 3, 4, 5, 6]), + (1720005012.137582, [], [1, 2, 3, 4, 5, 6]), + (1720005160.858454, [], [1, 2, 3, 4, 5, 6]), + (1720005162.913788, [], [1, 2, 3, 4, 5, 6]), + (1720175681.69546, [], [1, 2, 3, 4, 5, 6]), + (1720175774.033356, [], [1, 2, 3, 4, 5, 6]), + (1720175776.151125, [], [1, 2, 3, 4, 5, 6]), + (1720220252.732147, [], [1, 2, 3, 4, 5, 6]), + (1720220252.777516, [], [1, 2, 3, 4, 5, 6]), + 
(1720220256.747294, [], [1, 2, 3, 4, 5, 6]), + (1720492139.162569, [], [1, 2, 3, 4, 5, 6]), + (1720492141.256483, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719970987.569565, [], [1, 2, 3, 4, 5, 6])], + [(1720054508.915859, [], [1, 2, 3, 4, 5, 6]), (1720315224.08896, [], [1, 2, 3, 4, 5, 6])], + [(1720325854.156535, [], [1, 2, 3, 4, 5, 6]), (1720483854.080251, [], [1, 2, 3, 4, 5, 6])], + [(1720406510.418443, [], [1, 2, 3, 4, 5, 6])], + [ + (1720030600.544521, [], [1, 2, 3, 4, 5, 6]), + (1720069524.719771, [], [1, 2, 3, 4, 5, 6]), + (1720484112.369653, [], [1, 2, 3, 4, 5, 6]), + (1720568851.121099, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720304785.385271, [], [1, 2, 3, 4, 5, 6])], + [ + (1720313035.494802, [], [1, 2, 3, 4, 5, 6]), + (1720313090.718424, [], [1, 2, 3, 4, 5, 6]), + (1720313239.783577, [], [1, 2, 3, 4, 5, 6]), + (1720313343.042083, [], [1, 2, 3, 4, 5, 6]), + (1720404935.491204, [], [1, 2, 3, 4, 5, 6]), + (1720404979.552845, [], [1, 2, 3, 4, 5, 6]), + (1720404981.647182, [], [1, 2, 3, 4, 5, 6]), + (1720501716.284759, [], [1, 2, 3, 4, 5, 6]), + (1720501761.154088, [], [1, 2, 3, 4, 5, 6]), + (1720501763.234903, [], [1, 2, 3, 4, 5, 6]), + (1720584650.853158, [], [1, 2, 3, 4, 5, 6]), + (1720584688.899508, [], [1, 2, 3, 4, 5, 6]), + (1720584691.020015, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720229088.205557, [], [1, 2, 3, 4, 5, 6]), (1720487228.612214, [], [1, 2, 3, 4, 5, 6])], + [(1720241088.456982, [], [1, 2, 3, 4, 5, 6])], + [(1720042764.160666, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984325.782697, [], [1, 2, 3, 4, 5, 6]), + (1720047953.098659, [], [1, 2, 3, 4, 5, 6]), + (1720268267.887048, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720054622.362154, [], [1, 2, 3, 4, 5, 6]), + (1720134575.953204, [], [1, 2, 3, 4, 5, 6]), + (1720416355.096939, [], [1, 2, 3, 4, 5, 6]), + (1720500581.691615, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719972690.486054, [], [1, 2, 3, 4, 5, 6]), + (1719972877.103609, [], [1, 2, 3, 4, 5, 6]), + (1719972879.22778, [], [1, 2, 3, 4, 5, 6]), + (1719972881.797102, [], [1, 2, 3, 4, 5, 6]), + (1719972883.859612, [], [1, 2, 3, 4, 5, 6]), + (1720052338.317127, [], [1, 2, 3, 4, 5, 6]), + (1720052340.409592, [], [1, 2, 3, 4, 5, 6]), + (1720069426.554888, [], [1, 2, 3, 4, 5, 6]), + (1720069428.615973, [], [1, 2, 3, 4, 5, 6]), + (1720149027.365317, [], [1, 2, 3, 4, 5, 6]), + (1720149089.951754, [], [1, 2, 3, 4, 5, 6]), + (1720149092.012724, [], [1, 2, 3, 4, 5, 6]), + (1720234565.610403, [], [1, 2, 3, 4, 5, 6]), + (1720239125.82035, [], [1, 2, 3, 4, 5, 6]), + (1720239147.87241, [], [1, 2, 3, 4, 5, 6]), + (1720318243.573983, [], [1, 2, 3, 4, 5, 6]), + (1720318245.63705, [], [1, 2, 3, 4, 5, 6]), + (1720418009.877203, [], [1, 2, 3, 4, 5, 6]), + (1720418011.983148, [], [1, 2, 3, 4, 5, 6]), + (1720418014.464823, [], [1, 2, 3, 4, 5, 6]), + (1720485277.913378, [], [1, 2, 3, 4, 5, 6]), + (1720485280.026695, [], [1, 2, 3, 4, 5, 6]), + (1720574328.529507, [], [1, 2, 3, 4, 5, 6]), + (1720574330.633898, [], [1, 2, 3, 4, 5, 6]), + (1720581736.051228, [], [1, 2, 3, 4, 5, 6]), + (1720581766.135021, [], [1, 2, 3, 4, 5, 6]), + (1720581768.228326, [], [1, 2, 3, 4, 5, 6]), + (1720652888.715284, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720328167.133332, [], [1, 2, 3, 4, 5, 6]), (1720394291.998224, [], [1, 2, 3, 4, 5, 6])], + [(1720138278.025347, [], [1, 2, 3, 4, 5, 6])], + [(1720411684.615562, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979551.790599, [], [1, 2, 3, 4, 5, 6]), + (1720062548.79153, [], [1, 2, 3, 4, 5, 6]), + (1720152645.092565, [], [1, 2, 3, 4, 5, 6]), + (1720273648.542968, [], [1, 2, 3, 4, 5, 6]), + ], + 
[(1720150050.836492, [], [1, 2, 3, 4, 5, 6])], + [(1720235665.517364, [], [1, 2, 3, 4, 5, 6]), (1720504709.666893, [], [1, 2, 3, 4, 5, 6])], + [(1720502409.011067, [], [1, 2, 3, 4, 5, 6]), (1720652305.691241, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983664.396995, [], [1, 2, 3, 4, 5, 6]), + (1720057964.860551, [], [1, 2, 3, 4, 5, 6]), + (1720058069.016671, [], [1, 2, 3, 4, 5, 6]), + (1720119055.986377, [], [1, 2, 3, 4, 5, 6]), + (1720119177.882926, [], [1, 2, 3, 4, 5, 6]), + (1720146988.862958, [], [1, 2, 3, 4, 5, 6]), + (1720146990.940118, [], [1, 2, 3, 4, 5, 6]), + (1720207091.824328, [], [1, 2, 3, 4, 5, 6]), + (1720207147.984162, [], [1, 2, 3, 4, 5, 6]), + (1720207150.045311, [], [1, 2, 3, 4, 5, 6]), + (1720221686.916464, [], [1, 2, 3, 4, 5, 6]), + (1720221731.792885, [], [1, 2, 3, 4, 5, 6]), + (1720221733.892091, [], [1, 2, 3, 4, 5, 6]), + (1720221736.114027, [], [1, 2, 3, 4, 5, 6]), + (1720221738.1731, [], [1, 2, 3, 4, 5, 6]), + (1720221740.137735, [], [1, 2, 3, 4, 5, 6]), + (1720221742.219472, [], [1, 2, 3, 4, 5, 6]), + (1720319188.083254, [], [1, 2, 3, 4, 5, 6]), + (1720319190.195166, [], [1, 2, 3, 4, 5, 6]), + (1720333160.336537, [], [1, 2, 3, 4, 5, 6]), + (1720333162.39224, [], [1, 2, 3, 4, 5, 6]), + (1720350382.882768, [], [1, 2, 3, 4, 5, 6]), + (1720350384.998862, [], [1, 2, 3, 4, 5, 6]), + (1720350385.400746, [], [1, 2, 3, 4, 5, 6]), + (1720350387.504804, [], [1, 2, 3, 4, 5, 6]), + (1720350390.868713, [], [1, 2, 3, 4, 5, 6]), + (1720350392.929211, [], [1, 2, 3, 4, 5, 6]), + (1720482928.905461, [], [1, 2, 3, 4, 5, 6]), + (1720482987.630373, [], [1, 2, 3, 4, 5, 6]), + (1720549510.694147, [], [1, 2, 3, 4, 5, 6]), + (1720549582.87966, [], [1, 2, 3, 4, 5, 6]), + (1720549584.95341, [], [1, 2, 3, 4, 5, 6]), + (1720581476.586746, [], [1, 2, 3, 4, 5, 6]), + (1720581478.656771, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977177.729628, [], [1, 2, 3, 4, 5, 6]), (1720393638.078415, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980378.113974, [], [1, 2, 3, 4, 5, 6]), + (1720443102.049493, [], [1, 2, 3, 4, 5, 6]), + (1720590770.939412, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719979883.308045, [], [1, 2, 3, 4, 5, 6]), + (1720230654.923495, [], [1, 2, 3, 4, 5, 6]), + (1720310908.910099, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719964829.898192, [], [1, 2, 3, 4, 5, 6]), + (1719964832.003811, [], [1, 2, 3, 4, 5, 6]), + (1719985240.876339, [], [1, 2, 3, 4, 5, 6]), + (1719985242.911416, [], [1, 2, 3, 4, 5, 6]), + (1719985243.633507, [], [1, 2, 3, 4, 5, 6]), + (1719985245.665729, [], [1, 2, 3, 4, 5, 6]), + (1720060650.829248, [], [1, 2, 3, 4, 5, 6]), + (1720060759.718692, [], [1, 2, 3, 4, 5, 6]), + (1720060761.830242, [], [1, 2, 3, 4, 5, 6]), + (1720070260.443094, [], [1, 2, 3, 4, 5, 6]), + (1720070280.911994, [], [1, 2, 3, 4, 5, 6]), + (1720070282.979246, [], [1, 2, 3, 4, 5, 6]), + (1720134645.425223, [], [1, 2, 3, 4, 5, 6]), + (1720134793.819981, [], [1, 2, 3, 4, 5, 6]), + (1720134795.932398, [], [1, 2, 3, 4, 5, 6]), + (1720155938.192604, [], [1, 2, 3, 4, 5, 6]), + (1720155940.320279, [], [1, 2, 3, 4, 5, 6]), + (1720155945.041101, [], [1, 2, 3, 4, 5, 6]), + (1720155947.088061, [], [1, 2, 3, 4, 5, 6]), + (1720236895.111761, [], [1, 2, 3, 4, 5, 6]), + (1720236912.473535, [], [1, 2, 3, 4, 5, 6]), + (1720236914.593968, [], [1, 2, 3, 4, 5, 6]), + (1720236917.655587, [], [1, 2, 3, 4, 5, 6]), + (1720318871.824625, [], [1, 2, 3, 4, 5, 6]), + (1720318935.358285, [], [1, 2, 3, 4, 5, 6]), + (1720318937.446561, [], [1, 2, 3, 4, 5, 6]), + (1720318940.05207, [], [1, 2, 3, 4, 5, 6]), + (1720318942.106239, [], [1, 2, 3, 4, 5, 6]), + 
(1720405217.370251, [], [1, 2, 3, 4, 5, 6]), + (1720405312.528519, [], [1, 2, 3, 4, 5, 6]), + (1720405314.627163, [], [1, 2, 3, 4, 5, 6]), + (1720413100.204244, [], [1, 2, 3, 4, 5, 6]), + (1720413102.291035, [], [1, 2, 3, 4, 5, 6]), + (1720496542.308228, [], [1, 2, 3, 4, 5, 6]), + (1720574260.260325, [], [1, 2, 3, 4, 5, 6]), + (1720574343.117651, [], [1, 2, 3, 4, 5, 6]), + (1720574345.20748, [], [1, 2, 3, 4, 5, 6]), + (1720578705.104516, [], [1, 2, 3, 4, 5, 6]), + (1720578717.159504, [], [1, 2, 3, 4, 5, 6]), + (1720578719.26077, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720071370.538676, [], [1, 2, 3, 4, 5, 6])], + [(1719972220.86175, [], [1, 2, 3, 4, 5, 6]), (1720227223.558904, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047692.206123, [], [1, 2, 3, 4, 5, 6]), + (1720074479.043983, [], [1, 2, 3, 4, 5, 6]), + (1720221755.131247, [], [1, 2, 3, 4, 5, 6]), + (1720343377.429715, [], [1, 2, 3, 4, 5, 6]), + (1720581159.65796, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720054906.379171, [], [1, 2, 3, 4, 5, 6]), + (1720326827.193456, [], [1, 2, 3, 4, 5, 6]), + (1720395837.565662, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719972411.855532, [], [1, 2, 3, 4, 5, 6]), + (1719972538.863121, [], [1, 2, 3, 4, 5, 6]), + (1719972540.935712, [], [1, 2, 3, 4, 5, 6]), + (1720063717.900878, [], [1, 2, 3, 4, 5, 6]), + (1720063719.954111, [], [1, 2, 3, 4, 5, 6]), + (1720070114.807467, [], [1, 2, 3, 4, 5, 6]), + (1720070235.024434, [], [1, 2, 3, 4, 5, 6]), + (1720070237.14674, [], [1, 2, 3, 4, 5, 6]), + (1720097819.236115, [], [1, 2, 3, 4, 5, 6]), + (1720097978.260021, [], [1, 2, 3, 4, 5, 6]), + (1720097980.382821, [], [1, 2, 3, 4, 5, 6]), + (1720151026.716063, [], [1, 2, 3, 4, 5, 6]), + (1720151173.670938, [], [1, 2, 3, 4, 5, 6]), + (1720151175.717239, [], [1, 2, 3, 4, 5, 6]), + (1720166439.941955, [], [1, 2, 3, 4, 5, 6]), + (1720166583.693905, [], [1, 2, 3, 4, 5, 6]), + (1720166585.791065, [], [1, 2, 3, 4, 5, 6]), + (1720181553.630642, [], [1, 2, 3, 4, 5, 6]), + (1720181555.746202, [], [1, 2, 3, 4, 5, 6]), + (1720242210.300006, [], [1, 2, 3, 4, 5, 6]), + (1720242331.451228, [], [1, 2, 3, 4, 5, 6]), + (1720316730.127117, [], [1, 2, 3, 4, 5, 6]), + (1720316751.481651, [], [1, 2, 3, 4, 5, 6]), + (1720350332.517593, [], [1, 2, 3, 4, 5, 6]), + (1720350427.724851, [], [1, 2, 3, 4, 5, 6]), + (1720350429.836812, [], [1, 2, 3, 4, 5, 6]), + (1720396153.382808, [], [1, 2, 3, 4, 5, 6]), + (1720396199.106453, [], [1, 2, 3, 4, 5, 6]), + (1720396201.15929, [], [1, 2, 3, 4, 5, 6]), + (1720424092.525755, [], [1, 2, 3, 4, 5, 6]), + (1720424190.959176, [], [1, 2, 3, 4, 5, 6]), + (1720424193.037739, [], [1, 2, 3, 4, 5, 6]), + (1720492456.877253, [], [1, 2, 3, 4, 5, 6]), + (1720492529.103048, [], [1, 2, 3, 4, 5, 6]), + (1720492531.198928, [], [1, 2, 3, 4, 5, 6]), + (1720583806.008143, [], [1, 2, 3, 4, 5, 6]), + (1720583868.43082, [], [1, 2, 3, 4, 5, 6]), + (1720648763.855471, [], [1, 2, 3, 4, 5, 6]), + (1720648878.799852, [], [1, 2, 3, 4, 5, 6]), + (1720648880.882297, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720071793.774403, [], [1, 2, 3, 4, 5, 6]), + (1720309002.505766, [], [1, 2, 3, 4, 5, 6]), + (1720367384.379119, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978345.677095, [], [1, 2, 3, 4, 5, 6]), (1720134660.416426, [], [1, 2, 3, 4, 5, 6])], + [ + (1720056400.339178, [], [1, 2, 3, 4, 5, 6]), + (1720137451.906538, [], [1, 2, 3, 4, 5, 6]), + (1720581731.115191, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719974999.631834, [], [1, 2, 3, 4, 5, 6]), (1720503857.499785, [], [1, 2, 3, 4, 5, 6])], + [(1720325249.830373, [], [1, 2, 3, 4, 5, 6])], + [ + (1719970175.486046, [], 
[1, 2, 3, 4, 5, 6]), + (1720061532.244847, [], [1, 2, 3, 4, 5, 6]), + (1720387059.054565, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720157797.242967, [], [1, 2, 3, 4, 5, 6])], + [(1720490173.84352, [], [1, 2, 3, 4, 5, 6])], + [(1720568387.145132, [], [1, 2, 3, 4, 5, 6])], + [(1720027447.264569, [], [1, 2, 3, 4, 5, 6])], + [(1719979106.899872, [], [1, 2, 3, 4, 5, 6]), (1720417473.653713, [], [1, 2, 3, 4, 5, 6])], + [(1720153359.982848, [], [1, 2, 3, 4, 5, 6]), (1720468837.459019, [], [1, 2, 3, 4, 5, 6])], + [(1720047669.218866, [], [1, 2, 3, 4, 5, 6])], + [(1720230050.113895, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975411.228945, [], [1, 2, 3, 4, 5, 6]), + (1720235199.496284, [], [1, 2, 3, 4, 5, 6]), + (1720403154.17646, [], [1, 2, 3, 4, 5, 6]), + (1720626578.282517, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978136.275137, [], [1, 2, 3, 4, 5, 6]), (1720331670.572264, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975669.597909, [], [1, 2, 3, 4, 5, 6]), + (1719975763.25366, [], [1, 2, 3, 4, 5, 6]), + (1720055306.937976, [], [1, 2, 3, 4, 5, 6]), + (1720055449.351479, [], [1, 2, 3, 4, 5, 6]), + (1720067433.572041, [], [1, 2, 3, 4, 5, 6]), + (1720067452.84519, [], [1, 2, 3, 4, 5, 6]), + (1720067454.935816, [], [1, 2, 3, 4, 5, 6]), + (1720233716.974937, [], [1, 2, 3, 4, 5, 6]), + (1720233884.056907, [], [1, 2, 3, 4, 5, 6]), + (1720233886.107033, [], [1, 2, 3, 4, 5, 6]), + (1720238869.144339, [], [1, 2, 3, 4, 5, 6]), + (1720239054.729577, [], [1, 2, 3, 4, 5, 6]), + (1720239056.811577, [], [1, 2, 3, 4, 5, 6]), + (1720248048.594017, [], [1, 2, 3, 4, 5, 6]), + (1720248147.506317, [], [1, 2, 3, 4, 5, 6]), + (1720248149.540209, [], [1, 2, 3, 4, 5, 6]), + (1720323761.342714, [], [1, 2, 3, 4, 5, 6]), + (1720323763.381547, [], [1, 2, 3, 4, 5, 6]), + (1720400825.240853, [], [1, 2, 3, 4, 5, 6]), + (1720400844.815642, [], [1, 2, 3, 4, 5, 6]), + (1720410954.1329, [], [1, 2, 3, 4, 5, 6]), + (1720410956.230411, [], [1, 2, 3, 4, 5, 6]), + (1720410956.661655, [], [1, 2, 3, 4, 5, 6]), + (1720410958.800282, [], [1, 2, 3, 4, 5, 6]), + (1720414005.128157, [], [1, 2, 3, 4, 5, 6]), + (1720414035.742095, [], [1, 2, 3, 4, 5, 6]), + (1720414037.861356, [], [1, 2, 3, 4, 5, 6]), + (1720414039.522054, [], [1, 2, 3, 4, 5, 6]), + (1720414041.622559, [], [1, 2, 3, 4, 5, 6]), + (1720449836.553695, [], [1, 2, 3, 4, 5, 6]), + (1720449909.88067, [], [1, 2, 3, 4, 5, 6]), + (1720449912.006572, [], [1, 2, 3, 4, 5, 6]), + (1720504478.640048, [], [1, 2, 3, 4, 5, 6]), + (1720504584.183246, [], [1, 2, 3, 4, 5, 6]), + (1720504586.273448, [], [1, 2, 3, 4, 5, 6]), + (1720589586.941948, [], [1, 2, 3, 4, 5, 6]), + (1720589732.653657, [], [1, 2, 3, 4, 5, 6]), + (1720589734.757411, [], [1, 2, 3, 4, 5, 6]), + (1720589735.718174, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719987925.192586, [], [1, 2, 3, 4, 5, 6]), (1720319498.157106, [], [1, 2, 3, 4, 5, 6])], + [(1720140316.935341, [], [1, 2, 3, 4, 5, 6]), (1720581286.138288, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984942.453601, [], [1, 2, 3, 4, 5, 6]), + (1720334036.972544, [], [1, 2, 3, 4, 5, 6]), + (1720568302.136228, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720231990.896895, [], [1, 2, 3, 4, 5, 6]), (1720320392.727402, [], [1, 2, 3, 4, 5, 6])], + [(1720151072.246138, [], [1, 2, 3, 4, 5, 6]), (1720309428.675922, [], [1, 2, 3, 4, 5, 6])], + [(1720652752.302257, [], [1, 2, 3, 4, 5, 6])], + [ + (1719977332.758786, [], [1, 2, 3, 4, 5, 6]), + (1720135118.942837, [], [1, 2, 3, 4, 5, 6]), + (1720498977.766189, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720054300.10225, [], [1, 2, 3, 4, 5, 6]), (1720419250.119038, [], [1, 2, 3, 4, 5, 6])], + [ + 
(1720170242.586928, [], [1, 2, 3, 4, 5, 6]), + (1720322954.401713, [], [1, 2, 3, 4, 5, 6]), + (1720500416.057333, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719976447.157218, [], [1, 2, 3, 4, 5, 6])], + [(1720134454.623643, [], [1, 2, 3, 4, 5, 6]), (1720482790.529945, [], [1, 2, 3, 4, 5, 6])], + [(1720575291.374898, [], [1, 2, 3, 4, 5, 6])], + [(1720575147.912954, [], [1, 2, 3, 4, 5, 6])], + [ + (1719997197.65312, [], [1, 2, 3, 4, 5, 6]), + (1720137704.47896, [], [1, 2, 3, 4, 5, 6]), + (1720226085.527498, [], [1, 2, 3, 4, 5, 6]), + (1720306837.86921, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720314781.298338, [], [1, 2, 3, 4, 5, 6]), (1720443503.319112, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976515.23989, [], [1, 2, 3, 4, 5, 6]), + (1720225806.720086, [], [1, 2, 3, 4, 5, 6]), + (1720388901.256231, [], [1, 2, 3, 4, 5, 6]), + (1720490185.842396, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720568142.650151, [], [1, 2, 3, 4, 5, 6])], + [(1720587602.828532, [], [1, 2, 3, 4, 5, 6])], + [(1720228569.783763, [], [1, 2, 3, 4, 5, 6]), (1720577136.698764, [], [1, 2, 3, 4, 5, 6])], + [ + (1720058398.793045, [], [1, 2, 3, 4, 5, 6]), + (1720317616.711315, [], [1, 2, 3, 4, 5, 6]), + (1720498994.241943, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719988512.009335, [], [1, 2, 3, 4, 5, 6]), + (1720411879.880695, [], [1, 2, 3, 4, 5, 6]), + (1720575546.218164, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720245099.046699, [], [1, 2, 3, 4, 5, 6]), (1720652539.847041, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965518.303227, [], [1, 2, 3, 4, 5, 6]), + (1720241249.736668, [], [1, 2, 3, 4, 5, 6]), + (1720410560.906617, [], [1, 2, 3, 4, 5, 6]), + (1720566388.427971, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720033619.669265, [], [1, 2, 3, 4, 5, 6]), + (1720309514.690673, [], [1, 2, 3, 4, 5, 6]), + (1720584737.484501, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719981137.891986, [], [1, 2, 3, 4, 5, 6]), + (1719981255.525287, [], [1, 2, 3, 4, 5, 6]), + (1719981257.57542, [], [1, 2, 3, 4, 5, 6]), + (1720156254.934266, [], [1, 2, 3, 4, 5, 6]), + (1720156432.088183, [], [1, 2, 3, 4, 5, 6]), + (1720221245.352908, [], [1, 2, 3, 4, 5, 6]), + (1720221247.415618, [], [1, 2, 3, 4, 5, 6]), + (1720306695.430622, [], [1, 2, 3, 4, 5, 6]), + (1720306697.509606, [], [1, 2, 3, 4, 5, 6]), + (1720399726.625066, [], [1, 2, 3, 4, 5, 6]), + (1720399728.675873, [], [1, 2, 3, 4, 5, 6]), + (1720486842.405361, [], [1, 2, 3, 4, 5, 6]), + (1720486974.649877, [], [1, 2, 3, 4, 5, 6]), + (1720494605.919949, [], [1, 2, 3, 4, 5, 6]), + (1720494724.480053, [], [1, 2, 3, 4, 5, 6]), + (1720494726.541559, [], [1, 2, 3, 4, 5, 6]), + (1720572824.284783, [], [1, 2, 3, 4, 5, 6]), + (1720572826.355789, [], [1, 2, 3, 4, 5, 6]), + (1720652512.753893, [], [1, 2, 3, 4, 5, 6]), + (1720652514.833743, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720044224.653908, [], [1, 2, 3, 4, 5, 6]), (1720583176.852571, [], [1, 2, 3, 4, 5, 6])], + [(1720567262.122889, [], [1, 2, 3, 4, 5, 6])], + [(1720518049.925836, [], [1, 2, 3, 4, 5, 6])], + [(1720148280.678113, [], [1, 2, 3, 4, 5, 6]), (1720391739.484219, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983654.268169, [], [1, 2, 3, 4, 5, 6]), + (1719983761.844014, [], [1, 2, 3, 4, 5, 6]), + (1719983763.902973, [], [1, 2, 3, 4, 5, 6]), + (1720014262.846562, [], [1, 2, 3, 4, 5, 6]), + (1720014264.966534, [], [1, 2, 3, 4, 5, 6]), + (1720014268.064236, [], [1, 2, 3, 4, 5, 6]), + (1720014270.176366, [], [1, 2, 3, 4, 5, 6]), + (1720062164.847608, [], [1, 2, 3, 4, 5, 6]), + (1720062166.93557, [], [1, 2, 3, 4, 5, 6]), + (1720070760.286042, [], [1, 2, 3, 4, 5, 6]), + (1720070793.879274, [], [1, 2, 3, 4, 5, 
6]), + (1720070795.974998, [], [1, 2, 3, 4, 5, 6]), + (1720136175.682667, [], [1, 2, 3, 4, 5, 6]), + (1720136177.782735, [], [1, 2, 3, 4, 5, 6]), + (1720150756.421019, [], [1, 2, 3, 4, 5, 6]), + (1720150758.537073, [], [1, 2, 3, 4, 5, 6]), + (1720226712.358545, [], [1, 2, 3, 4, 5, 6]), + (1720320316.371588, [], [1, 2, 3, 4, 5, 6]), + (1720396676.623722, [], [1, 2, 3, 4, 5, 6]), + (1720396759.330429, [], [1, 2, 3, 4, 5, 6]), + (1720482810.511366, [], [1, 2, 3, 4, 5, 6]), + (1720482891.609285, [], [1, 2, 3, 4, 5, 6]), + (1720482893.739553, [], [1, 2, 3, 4, 5, 6]), + (1720502988.652815, [], [1, 2, 3, 4, 5, 6]), + (1720503034.447086, [], [1, 2, 3, 4, 5, 6]), + (1720503036.52898, [], [1, 2, 3, 4, 5, 6]), + (1720503036.606516, [], [1, 2, 3, 4, 5, 6]), + (1720503038.712119, [], [1, 2, 3, 4, 5, 6]), + (1720566567.148583, [], [1, 2, 3, 4, 5, 6]), + (1720566710.618717, [], [1, 2, 3, 4, 5, 6]), + (1720624425.022175, [], [1, 2, 3, 4, 5, 6]), + (1720624567.571474, [], [1, 2, 3, 4, 5, 6]), + (1720624569.66289, [], [1, 2, 3, 4, 5, 6]), + (1720652508.525789, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978793.297818, [], [1, 2, 3, 4, 5, 6]), (1720312690.624643, [], [1, 2, 3, 4, 5, 6])], + [(1720408103.17786, [], [1, 2, 3, 4, 5, 6]), (1720496665.901316, [], [1, 2, 3, 4, 5, 6])], + [(1720397084.267673, [], [1, 2, 3, 4, 5, 6]), (1720499411.209847, [], [1, 2, 3, 4, 5, 6])], + [ + (1720111853.777887, [], [1, 2, 3, 4, 5, 6]), + (1720111923.412934, [], [1, 2, 3, 4, 5, 6]), + (1720139482.167685, [], [1, 2, 3, 4, 5, 6]), + (1720139533.842338, [], [1, 2, 3, 4, 5, 6]), + (1720139535.907287, [], [1, 2, 3, 4, 5, 6]), + (1720139540.267313, [], [1, 2, 3, 4, 5, 6]), + (1720139542.34773, [], [1, 2, 3, 4, 5, 6]), + (1720139547.051966, [], [1, 2, 3, 4, 5, 6]), + (1720139549.136732, [], [1, 2, 3, 4, 5, 6]), + (1720464344.794745, [], [1, 2, 3, 4, 5, 6]), + (1720464401.900918, [], [1, 2, 3, 4, 5, 6]), + (1720464404.029255, [], [1, 2, 3, 4, 5, 6]), + (1720498850.875209, [], [1, 2, 3, 4, 5, 6]), + (1720499103.608103, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719973523.458065, [], [1, 2, 3, 4, 5, 6]), (1720233566.787523, [], [1, 2, 3, 4, 5, 6])], + [(1720407928.090004, [], [1, 2, 3, 4, 5, 6])], + [(1720032729.148346, [], [1, 2, 3, 4, 5, 6]), (1720062532.225999, [], [1, 2, 3, 4, 5, 6])], + [(1720034095.862663, [], [1, 2, 3, 4, 5, 6]), (1720491285.681862, [], [1, 2, 3, 4, 5, 6])], + [(1720096472.997597, [], [1, 2, 3, 4, 5, 6]), (1720568056.766425, [], [1, 2, 3, 4, 5, 6])], + [(1720138718.911672, [], [1, 2, 3, 4, 5, 6])], + [ + (1720336998.597537, [], [1, 2, 3, 4, 5, 6]), + (1720489473.142035, [], [1, 2, 3, 4, 5, 6]), + (1720574315.596422, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720577328.151149, [], [1, 2, 3, 4, 5, 6])], + [(1720072713.69671, [], [1, 2, 3, 4, 5, 6]), (1720231002.690818, [], [1, 2, 3, 4, 5, 6])], + [(1719966317.997493, [], [1, 2, 3, 4, 5, 6]), (1720238108.647106, [], [1, 2, 3, 4, 5, 6])], + [(1720146847.656681, [], [1, 2, 3, 4, 5, 6])], + [(1720239981.42926, [], [1, 2, 3, 4, 5, 6])], + [(1720081339.444776, [], [1, 2, 3, 4, 5, 6]), (1720234051.371763, [], [1, 2, 3, 4, 5, 6])], + [(1720236937.844197, [], [1, 2, 3, 4, 5, 6]), (1720501314.981075, [], [1, 2, 3, 4, 5, 6])], + [(1720495611.198831, [], [1, 2, 3, 4, 5, 6])], + [(1720071452.84595, [], [1, 2, 3, 4, 5, 6])], + [(1720320971.754361, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982790.224924, [], [1, 2, 3, 4, 5, 6]), + (1719982861.078823, [], [1, 2, 3, 4, 5, 6]), + (1719982863.122702, [], [1, 2, 3, 4, 5, 6]), + (1720052453.241504, [], [1, 2, 3, 4, 5, 6]), + (1720052505.408448, [], [1, 2, 3, 
4, 5, 6]), + (1720052507.485592, [], [1, 2, 3, 4, 5, 6]), + (1720078456.868981, [], [1, 2, 3, 4, 5, 6]), + (1720137047.584706, [], [1, 2, 3, 4, 5, 6]), + (1720137124.096958, [], [1, 2, 3, 4, 5, 6]), + (1720137126.192241, [], [1, 2, 3, 4, 5, 6]), + (1720155528.420602, [], [1, 2, 3, 4, 5, 6]), + (1720155596.835697, [], [1, 2, 3, 4, 5, 6]), + (1720155598.919376, [], [1, 2, 3, 4, 5, 6]), + (1720204090.330488, [], [1, 2, 3, 4, 5, 6]), + (1720204222.690243, [], [1, 2, 3, 4, 5, 6]), + (1720204224.805824, [], [1, 2, 3, 4, 5, 6]), + (1720232760.467367, [], [1, 2, 3, 4, 5, 6]), + (1720232859.977733, [], [1, 2, 3, 4, 5, 6]), + (1720242903.930897, [], [1, 2, 3, 4, 5, 6]), + (1720242906.021355, [], [1, 2, 3, 4, 5, 6]), + (1720309697.411345, [], [1, 2, 3, 4, 5, 6]), + (1720309699.483954, [], [1, 2, 3, 4, 5, 6]), + (1720406346.354509, [], [1, 2, 3, 4, 5, 6]), + (1720406523.466919, [], [1, 2, 3, 4, 5, 6]), + (1720406525.535072, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720065979.154591, [], [1, 2, 3, 4, 5, 6]), (1720325699.423285, [], [1, 2, 3, 4, 5, 6])], + [(1720244750.093352, [], [1, 2, 3, 4, 5, 6]), (1720394343.192185, [], [1, 2, 3, 4, 5, 6])], + [(1720193298.590097, [], [1, 2, 3, 4, 5, 6]), (1720315677.193089, [], [1, 2, 3, 4, 5, 6])], + [(1720501281.07252, [], [1, 2, 3, 4, 5, 6])], + [(1720055721.622214, [], [1, 2, 3, 4, 5, 6])], + [ + (1720137839.895581, [], [1, 2, 3, 4, 5, 6]), + (1720231251.548774, [], [1, 2, 3, 4, 5, 6]), + (1720350224.693877, [], [1, 2, 3, 4, 5, 6]), + (1720519304.741337, [], [1, 2, 3, 4, 5, 6]), + (1720586487.784295, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719969181.053709, [], [1, 2, 3, 4, 5, 6]), (1720434586.266895, [], [1, 2, 3, 4, 5, 6])], + [(1720070912.148493, [], [1, 2, 3, 4, 5, 6])], + [(1720244703.673132, [], [1, 2, 3, 4, 5, 6]), (1720494833.034907, [], [1, 2, 3, 4, 5, 6])], + [ + (1720053732.993216, [], [1, 2, 3, 4, 5, 6]), + (1720053735.086697, [], [1, 2, 3, 4, 5, 6]), + (1720081490.114819, [], [1, 2, 3, 4, 5, 6]), + (1720081492.188923, [], [1, 2, 3, 4, 5, 6]), + (1720108329.744396, [], [1, 2, 3, 4, 5, 6]), + (1720108420.858541, [], [1, 2, 3, 4, 5, 6]), + (1720108422.93886, [], [1, 2, 3, 4, 5, 6]), + (1720142971.786605, [], [1, 2, 3, 4, 5, 6]), + (1720143021.896153, [], [1, 2, 3, 4, 5, 6]), + (1720149975.921352, [], [1, 2, 3, 4, 5, 6]), + (1720150041.125351, [], [1, 2, 3, 4, 5, 6]), + (1720150043.15518, [], [1, 2, 3, 4, 5, 6]), + (1720200733.408027, [], [1, 2, 3, 4, 5, 6]), + (1720200744.366236, [], [1, 2, 3, 4, 5, 6]), + (1720200746.48024, [], [1, 2, 3, 4, 5, 6]), + (1720226248.428928, [], [1, 2, 3, 4, 5, 6]), + (1720226387.261389, [], [1, 2, 3, 4, 5, 6]), + (1720325189.273212, [], [1, 2, 3, 4, 5, 6]), + (1720367266.448359, [], [1, 2, 3, 4, 5, 6]), + (1720367268.528501, [], [1, 2, 3, 4, 5, 6]), + (1720397514.119584, [], [1, 2, 3, 4, 5, 6]), + (1720397583.541623, [], [1, 2, 3, 4, 5, 6]), + (1720397585.62972, [], [1, 2, 3, 4, 5, 6]), + (1720407649.068004, [], [1, 2, 3, 4, 5, 6]), + (1720407859.450723, [], [1, 2, 3, 4, 5, 6]), + (1720407861.5267, [], [1, 2, 3, 4, 5, 6]), + (1720418226.184583, [], [1, 2, 3, 4, 5, 6]), + (1720418312.907521, [], [1, 2, 3, 4, 5, 6]), + (1720418312.959891, [], [1, 2, 3, 4, 5, 6]), + (1720418314.508588, [], [1, 2, 3, 4, 5, 6]), + (1720429033.410454, [], [1, 2, 3, 4, 5, 6]), + (1720429217.5183, [], [1, 2, 3, 4, 5, 6]), + (1720429219.58254, [], [1, 2, 3, 4, 5, 6]), + (1720476196.299215, [], [1, 2, 3, 4, 5, 6]), + (1720476290.414317, [], [1, 2, 3, 4, 5, 6]), + (1720476292.497993, [], [1, 2, 3, 4, 5, 6]), + (1720496668.635514, [], [1, 2, 3, 4, 5, 6]), + 
(1720496670.762669, [], [1, 2, 3, 4, 5, 6]), + (1720566807.578929, [], [1, 2, 3, 4, 5, 6]), + (1720566881.524889, [], [1, 2, 3, 4, 5, 6]), + (1720566883.613068, [], [1, 2, 3, 4, 5, 6]), + (1720575742.398153, [], [1, 2, 3, 4, 5, 6]), + (1720575760.407369, [], [1, 2, 3, 4, 5, 6]), + (1720575762.530879, [], [1, 2, 3, 4, 5, 6]), + (1720623060.799492, [], [1, 2, 3, 4, 5, 6]), + (1720623163.775703, [], [1, 2, 3, 4, 5, 6]), + (1720623165.819144, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720490829.088908, [], [1, 2, 3, 4, 5, 6])], + [ + (1720222040.449568, [], [1, 2, 3, 4, 5, 6]), + (1720328183.580613, [], [1, 2, 3, 4, 5, 6]), + (1720581997.108309, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720631726.024509, [], [1, 2, 3, 4, 5, 6])], + [ + (1719969186.42388, [], [1, 2, 3, 4, 5, 6]), + (1720236467.453142, [], [1, 2, 3, 4, 5, 6]), + (1720460294.599805, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720328624.599959, [], [1, 2, 3, 4, 5, 6]), + (1720411036.02508, [], [1, 2, 3, 4, 5, 6]), + (1720470233.314202, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719966062.758672, [], [1, 2, 3, 4, 5, 6]), + (1720057471.705526, [], [1, 2, 3, 4, 5, 6]), + (1720325034.717518, [], [1, 2, 3, 4, 5, 6]), + (1720407309.902625, [], [1, 2, 3, 4, 5, 6]), + (1720573477.911506, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720063887.70449, [], [1, 2, 3, 4, 5, 6])], + [(1720343326.152899, [], [1, 2, 3, 4, 5, 6])], + [(1720411362.644921, [], [1, 2, 3, 4, 5, 6])], + [(1720395606.751317, [], [1, 2, 3, 4, 5, 6])], + [(1720155980.858558, [], [1, 2, 3, 4, 5, 6])], + [(1720450339.669296, [], [1, 2, 3, 4, 5, 6])], + [(1719981567.460091, [], [1, 2, 3, 4, 5, 6]), (1720587320.169523, [], [1, 2, 3, 4, 5, 6])], + [(1720245122.915738, [], [1, 2, 3, 4, 5, 6])], + [(1719970229.063219, [], [1, 2, 3, 4, 5, 6]), (1720488361.805483, [], [1, 2, 3, 4, 5, 6])], + [(1720320009.047059, [], [1, 2, 3, 4, 5, 6])], + [(1720139484.708505, [], [1, 2, 3, 4, 5, 6]), (1720396780.73649, [], [1, 2, 3, 4, 5, 6])], + [(1720238094.386701, [], [1, 2, 3, 4, 5, 6])], + [(1720627574.598265, [], [1, 2, 3, 4, 5, 6])], + [(1720136834.089355, [], [1, 2, 3, 4, 5, 6]), (1720396824.609765, [], [1, 2, 3, 4, 5, 6])], + [(1720225652.369657, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982133.012616, [], [1, 2, 3, 4, 5, 6]), + (1719982177.631804, [], [1, 2, 3, 4, 5, 6]), + (1719982179.720602, [], [1, 2, 3, 4, 5, 6]), + (1720006591.274361, [], [1, 2, 3, 4, 5, 6]), + (1720006644.930183, [], [1, 2, 3, 4, 5, 6]), + (1720006647.03435, [], [1, 2, 3, 4, 5, 6]), + (1720460548.964008, [], [1, 2, 3, 4, 5, 6]), + (1720460614.237345, [], [1, 2, 3, 4, 5, 6]), + (1720460616.332418, [], [1, 2, 3, 4, 5, 6]), + (1720585282.645498, [], [1, 2, 3, 4, 5, 6]), + (1720585293.462072, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720370572.550118, [], [1, 2, 3, 4, 5, 6])], + [ + (1719973160.879923, [], [1, 2, 3, 4, 5, 6]), + (1720329101.982409, [], [1, 2, 3, 4, 5, 6]), + (1720581501.430356, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720063998.039445, [], [1, 2, 3, 4, 5, 6]), + (1720232764.384684, [], [1, 2, 3, 4, 5, 6]), + (1720502473.633051, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720153647.635598, [], [1, 2, 3, 4, 5, 6]), + (1720225923.85076, [], [1, 2, 3, 4, 5, 6]), + (1720413430.570698, [], [1, 2, 3, 4, 5, 6]), + (1720584131.042756, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719966759.043742, [], [1, 2, 3, 4, 5, 6])], + [(1720405936.570297, [], [1, 2, 3, 4, 5, 6])], + [(1720144919.519677, [], [1, 2, 3, 4, 5, 6]), (1720402676.1685, [], [1, 2, 3, 4, 5, 6])], + [(1720402994.034134, [], [1, 2, 3, 4, 5, 6]), (1720497341.728864, [], [1, 2, 3, 4, 5, 6])], + [ + 
(1719973785.927392, [], [1, 2, 3, 4, 5, 6]), + (1720142506.754009, [], [1, 2, 3, 4, 5, 6]), + (1720312482.395361, [], [1, 2, 3, 4, 5, 6]), + (1720578049.42885, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720143203.796648, [], [1, 2, 3, 4, 5, 6]), (1720504600.034248, [], [1, 2, 3, 4, 5, 6])], + [(1720138317.024564, [], [1, 2, 3, 4, 5, 6]), (1720307922.860078, [], [1, 2, 3, 4, 5, 6])], + [(1720576710.045341, [], [1, 2, 3, 4, 5, 6])], + [ + (1720237948.24219, [], [1, 2, 3, 4, 5, 6]), + (1720322691.233406, [], [1, 2, 3, 4, 5, 6]), + (1720412663.957815, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720342019.617667, [], [1, 2, 3, 4, 5, 6]), + (1720342090.227667, [], [1, 2, 3, 4, 5, 6]), + (1720342263.731169, [], [1, 2, 3, 4, 5, 6]), + (1720342307.569989, [], [1, 2, 3, 4, 5, 6]), + (1720342413.538738, [], [1, 2, 3, 4, 5, 6]), + (1720342570.868506, [], [1, 2, 3, 4, 5, 6]), + (1720342664.423143, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720053725.982337, [], [1, 2, 3, 4, 5, 6]), + (1720137089.95596, [], [1, 2, 3, 4, 5, 6]), + (1720250340.159455, [], [1, 2, 3, 4, 5, 6]), + (1720408080.82431, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720502215.54816, [], [1, 2, 3, 4, 5, 6])], + [(1720051018.757074, [], [1, 2, 3, 4, 5, 6]), (1720221304.68857, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055639.220711, [], [1, 2, 3, 4, 5, 6]), + (1720242136.136068, [], [1, 2, 3, 4, 5, 6]), + (1720501308.452889, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720210019.832882, [], [1, 2, 3, 4, 5, 6])], + [(1720222496.41532, [], [1, 2, 3, 4, 5, 6])], + [(1720221892.596089, [], [1, 2, 3, 4, 5, 6]), (1720488555.303827, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055240.779901, [], [1, 2, 3, 4, 5, 6]), + (1720485059.84637, [], [1, 2, 3, 4, 5, 6]), + (1720520102.630634, [], [1, 2, 3, 4, 5, 6]), + (1720591031.4354, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720570592.888394, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059956.606064, [], [1, 2, 3, 4, 5, 6]), + (1720232781.82764, [], [1, 2, 3, 4, 5, 6]), + (1720489307.963369, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720466563.789269, [], [1, 2, 3, 4, 5, 6])], + [(1720120332.505828, [], [1, 2, 3, 4, 5, 6]), (1720501386.247192, [], [1, 2, 3, 4, 5, 6])], + [ + (1720045443.968104, [], [1, 2, 3, 4, 5, 6]), + (1720337612.000658, [], [1, 2, 3, 4, 5, 6]), + (1720484793.823359, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720240516.409323, [], [1, 2, 3, 4, 5, 6]), (1720508486.303913, [], [1, 2, 3, 4, 5, 6])], + [ + (1720056682.445295, [], [1, 2, 3, 4, 5, 6]), + (1720239570.480365, [], [1, 2, 3, 4, 5, 6]), + (1720399243.691516, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720065346.577694, [], [1, 2, 3, 4, 5, 6]), + (1720111179.563476, [], [1, 2, 3, 4, 5, 6]), + (1720152182.18393, [], [1, 2, 3, 4, 5, 6]), + (1720456368.150945, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720037842.027886, [], [1, 2, 3, 4, 5, 6])], + [(1720051512.155726, [], [1, 2, 3, 4, 5, 6]), (1720316085.436368, [], [1, 2, 3, 4, 5, 6])], + [ + (1720153922.872643, [], [1, 2, 3, 4, 5, 6]), + (1720316484.292604, [], [1, 2, 3, 4, 5, 6]), + (1720481626.562697, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720304528.044157, [], [1, 2, 3, 4, 5, 6]), (1720587171.914424, [], [1, 2, 3, 4, 5, 6])], + [ + (1719969690.052003, [], [1, 2, 3, 4, 5, 6]), + (1720098093.259497, [], [1, 2, 3, 4, 5, 6]), + (1720589467.401983, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720182994.851728, [], [1, 2, 3, 4, 5, 6]), (1720490206.204252, [], [1, 2, 3, 4, 5, 6])], + [(1720305269.133214, [], [1, 2, 3, 4, 5, 6]), (1720580679.401674, [], [1, 2, 3, 4, 5, 6])], + [(1720582113.001824, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971867.373391, [], [1, 2, 3, 4, 5, 6]), + 
(1719971869.452767, [], [1, 2, 3, 4, 5, 6]), + (1719983561.418747, [], [1, 2, 3, 4, 5, 6]), + (1719983615.306689, [], [1, 2, 3, 4, 5, 6]), + (1719983617.371374, [], [1, 2, 3, 4, 5, 6]), + (1719983622.154397, [], [1, 2, 3, 4, 5, 6]), + (1719983624.239597, [], [1, 2, 3, 4, 5, 6]), + (1720057585.854293, [], [1, 2, 3, 4, 5, 6]), + (1720127843.991043, [], [1, 2, 3, 4, 5, 6]), + (1720127952.545227, [], [1, 2, 3, 4, 5, 6]), + (1720150451.197164, [], [1, 2, 3, 4, 5, 6]), + (1720150472.889245, [], [1, 2, 3, 4, 5, 6]), + (1720229579.372015, [], [1, 2, 3, 4, 5, 6]), + (1720229585.29839, [], [1, 2, 3, 4, 5, 6]), + (1720229587.33746, [], [1, 2, 3, 4, 5, 6]), + (1720272362.151724, [], [1, 2, 3, 4, 5, 6]), + (1720272395.494166, [], [1, 2, 3, 4, 5, 6]), + (1720272397.584197, [], [1, 2, 3, 4, 5, 6]), + (1720325287.360716, [], [1, 2, 3, 4, 5, 6]), + (1720325289.430457, [], [1, 2, 3, 4, 5, 6]), + (1720392144.674955, [], [1, 2, 3, 4, 5, 6]), + (1720392146.786158, [], [1, 2, 3, 4, 5, 6]), + (1720406690.885685, [], [1, 2, 3, 4, 5, 6]), + (1720406692.950513, [], [1, 2, 3, 4, 5, 6]), + (1720486441.134231, [], [1, 2, 3, 4, 5, 6]), + (1720486443.192435, [], [1, 2, 3, 4, 5, 6]), + (1720648828.296221, [], [1, 2, 3, 4, 5, 6]), + (1720648830.340132, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977053.236432, [], [1, 2, 3, 4, 5, 6])], + [(1720146886.388756, [], [1, 2, 3, 4, 5, 6])], + [(1720147721.983335, [], [1, 2, 3, 4, 5, 6]), (1720577319.095652, [], [1, 2, 3, 4, 5, 6])], + [(1720187232.833461, [], [1, 2, 3, 4, 5, 6])], + [(1720309745.334443, [], [1, 2, 3, 4, 5, 6]), (1720525020.981442, [], [1, 2, 3, 4, 5, 6])], + [(1719985270.896874, [], [1, 2, 3, 4, 5, 6]), (1720147203.361104, [], [1, 2, 3, 4, 5, 6])], + [(1719975189.590595, [], [1, 2, 3, 4, 5, 6])], + [ + (1720153681.561666, [], [1, 2, 3, 4, 5, 6]), + (1720315141.854012, [], [1, 2, 3, 4, 5, 6]), + (1720483759.06017, [], [1, 2, 3, 4, 5, 6]), + (1720632532.362134, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719969377.021113, [], [1, 2, 3, 4, 5, 6]), + (1719969547.865829, [], [1, 2, 3, 4, 5, 6]), + (1720050670.589639, [], [1, 2, 3, 4, 5, 6]), + (1720050713.412665, [], [1, 2, 3, 4, 5, 6]), + (1720139076.150907, [], [1, 2, 3, 4, 5, 6]), + (1720139087.933212, [], [1, 2, 3, 4, 5, 6]), + (1720139090.022598, [], [1, 2, 3, 4, 5, 6]), + (1720139092.090332, [], [1, 2, 3, 4, 5, 6]), + (1720148904.698605, [], [1, 2, 3, 4, 5, 6]), + (1720197454.202625, [], [1, 2, 3, 4, 5, 6]), + (1720197456.301898, [], [1, 2, 3, 4, 5, 6]), + (1720221957.937687, [], [1, 2, 3, 4, 5, 6]), + (1720222151.210074, [], [1, 2, 3, 4, 5, 6]), + (1720222153.281944, [], [1, 2, 3, 4, 5, 6]), + (1720231319.785278, [], [1, 2, 3, 4, 5, 6]), + (1720314287.823226, [], [1, 2, 3, 4, 5, 6]), + (1720314375.707773, [], [1, 2, 3, 4, 5, 6]), + (1720314377.787834, [], [1, 2, 3, 4, 5, 6]), + (1720331369.745063, [], [1, 2, 3, 4, 5, 6]), + (1720331582.949466, [], [1, 2, 3, 4, 5, 6]), + (1720331585.058912, [], [1, 2, 3, 4, 5, 6]), + (1720399235.526545, [], [1, 2, 3, 4, 5, 6]), + (1720399237.6268, [], [1, 2, 3, 4, 5, 6]), + (1720410762.341061, [], [1, 2, 3, 4, 5, 6]), + (1720410808.990309, [], [1, 2, 3, 4, 5, 6]), + (1720410811.040448, [], [1, 2, 3, 4, 5, 6]), + (1720493330.828194, [], [1, 2, 3, 4, 5, 6]), + (1720493516.887173, [], [1, 2, 3, 4, 5, 6]), + (1720501442.580123, [], [1, 2, 3, 4, 5, 6]), + (1720501548.316894, [], [1, 2, 3, 4, 5, 6]), + (1720501550.379738, [], [1, 2, 3, 4, 5, 6]), + (1720573012.279738, [], [1, 2, 3, 4, 5, 6]), + (1720573204.24471, [], [1, 2, 3, 4, 5, 6]), + (1720573206.359087, [], [1, 2, 3, 4, 5, 6]), + 
(1720573210.996145, [], [1, 2, 3, 4, 5, 6]), + (1720573213.096745, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719984464.146305, [], [1, 2, 3, 4, 5, 6])], + [(1719969484.575186, [], [1, 2, 3, 4, 5, 6])], + [(1719967098.321792, [], [1, 2, 3, 4, 5, 6]), (1720140304.171738, [], [1, 2, 3, 4, 5, 6])], + [ + (1720067679.407113, [], [1, 2, 3, 4, 5, 6]), + (1720240007.297001, [], [1, 2, 3, 4, 5, 6]), + (1720499615.946055, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719984745.989291, [], [1, 2, 3, 4, 5, 6]), + (1720088802.060799, [], [1, 2, 3, 4, 5, 6]), + (1720226330.102201, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720057325.702019, [], [1, 2, 3, 4, 5, 6]), (1720499465.567145, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055478.668518, [], [1, 2, 3, 4, 5, 6]), + (1720055619.422527, [], [1, 2, 3, 4, 5, 6]), + (1720138025.392906, [], [1, 2, 3, 4, 5, 6]), + (1720138074.32289, [], [1, 2, 3, 4, 5, 6]), + (1720232045.90559, [], [1, 2, 3, 4, 5, 6]), + (1720232073.337701, [], [1, 2, 3, 4, 5, 6]), + (1720278094.793407, [], [1, 2, 3, 4, 5, 6]), + (1720278096.912409, [], [1, 2, 3, 4, 5, 6]), + (1720278099.569789, [], [1, 2, 3, 4, 5, 6]), + (1720278101.660519, [], [1, 2, 3, 4, 5, 6]), + (1720324663.973123, [], [1, 2, 3, 4, 5, 6]), + (1720324666.034118, [], [1, 2, 3, 4, 5, 6]), + (1720412864.0991, [], [1, 2, 3, 4, 5, 6]), + (1720412932.896312, [], [1, 2, 3, 4, 5, 6]), + (1720412934.95735, [], [1, 2, 3, 4, 5, 6]), + (1720493768.204791, [], [1, 2, 3, 4, 5, 6]), + (1720493848.668367, [], [1, 2, 3, 4, 5, 6]), + (1720493850.800293, [], [1, 2, 3, 4, 5, 6]), + (1720493853.855696, [], [1, 2, 3, 4, 5, 6]), + (1720578407.565863, [], [1, 2, 3, 4, 5, 6]), + (1720578455.012928, [], [1, 2, 3, 4, 5, 6]), + (1720578457.12311, [], [1, 2, 3, 4, 5, 6]), + (1720592507.954368, [], [1, 2, 3, 4, 5, 6]), + (1720592695.674207, [], [1, 2, 3, 4, 5, 6]), + (1720592697.763035, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720448820.538088, [], [1, 2, 3, 4, 5, 6]), + (1720448848.354821, [], [1, 2, 3, 4, 5, 6]), + (1720448968.980221, [], [1, 2, 3, 4, 5, 6]), + (1720449109.647373, [], [1, 2, 3, 4, 5, 6]), + (1720449132.605916, [], [1, 2, 3, 4, 5, 6]), + (1720449141.226924, [], [1, 2, 3, 4, 5, 6]), + (1720449174.132961, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720143295.563285, [], [1, 2, 3, 4, 5, 6])], + [ + (1720009875.184202, [], [1, 2, 3, 4, 5, 6]), + (1720064301.403426, [], [1, 2, 3, 4, 5, 6]), + (1720221459.433168, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720316122.630709, [], [1, 2, 3, 4, 5, 6]), (1720625396.811387, [], [1, 2, 3, 4, 5, 6])], + [(1720064525.079458, [], [1, 2, 3, 4, 5, 6])], + [(1720600790.059805, [], [1, 2, 3, 4, 5, 6])], + [(1720053513.239524, [], [1, 2, 3, 4, 5, 6]), (1720533559.490134, [], [1, 2, 3, 4, 5, 6])], + [(1720222657.803241, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971419.792625, [], [1, 2, 3, 4, 5, 6]), + (1720239049.653382, [], [1, 2, 3, 4, 5, 6]), + (1720497253.487835, [], [1, 2, 3, 4, 5, 6]), + (1720571009.60795, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719978213.57048, [], [1, 2, 3, 4, 5, 6]), + (1719978402.543586, [], [1, 2, 3, 4, 5, 6]), + (1720067921.564313, [], [1, 2, 3, 4, 5, 6]), + (1720068031.2973, [], [1, 2, 3, 4, 5, 6]), + (1720068033.364045, [], [1, 2, 3, 4, 5, 6]), + (1720076693.193638, [], [1, 2, 3, 4, 5, 6]), + (1720076695.234922, [], [1, 2, 3, 4, 5, 6]), + (1720088372.082518, [], [1, 2, 3, 4, 5, 6]), + (1720088448.747115, [], [1, 2, 3, 4, 5, 6]), + (1720222636.476764, [], [1, 2, 3, 4, 5, 6]), + (1720222701.214913, [], [1, 2, 3, 4, 5, 6]), + (1720311136.481341, [], [1, 2, 3, 4, 5, 6]), + (1720311279.356667, [], [1, 2, 3, 4, 5, 6]), + 
(1720311281.435353, [], [1, 2, 3, 4, 5, 6]), + (1720321937.516249, [], [1, 2, 3, 4, 5, 6]), + (1720321977.750869, [], [1, 2, 3, 4, 5, 6]), + (1720321979.826956, [], [1, 2, 3, 4, 5, 6]), + (1720321983.309368, [], [1, 2, 3, 4, 5, 6]), + (1720417820.177018, [], [1, 2, 3, 4, 5, 6]), + (1720417888.907443, [], [1, 2, 3, 4, 5, 6]), + (1720482544.485269, [], [1, 2, 3, 4, 5, 6]), + (1720482650.874077, [], [1, 2, 3, 4, 5, 6]), + (1720571012.586842, [], [1, 2, 3, 4, 5, 6]), + (1720571014.653099, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720067135.000485, [], [1, 2, 3, 4, 5, 6]), + (1720226886.323383, [], [1, 2, 3, 4, 5, 6]), + (1720626810.190995, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720021468.494681, [], [1, 2, 3, 4, 5, 6]), (1720244311.296556, [], [1, 2, 3, 4, 5, 6])], + [ + (1720054497.052088, [], [1, 2, 3, 4, 5, 6]), + (1720315797.04068, [], [1, 2, 3, 4, 5, 6]), + (1720396623.976121, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719970439.050635, [], [1, 2, 3, 4, 5, 6]), (1720411294.606462, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047660.240807, [], [1, 2, 3, 4, 5, 6]), + (1720209425.126479, [], [1, 2, 3, 4, 5, 6]), + (1720417042.301423, [], [1, 2, 3, 4, 5, 6]), + (1720579466.836909, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720068666.058135, [], [1, 2, 3, 4, 5, 6]), + (1720224717.712974, [], [1, 2, 3, 4, 5, 6]), + (1720313644.184984, [], [1, 2, 3, 4, 5, 6]), + (1720417247.572309, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720217112.012918, [], [1, 2, 3, 4, 5, 6])], + [(1720228893.793094, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965114.583168, [], [1, 2, 3, 4, 5, 6]), + (1720221700.128257, [], [1, 2, 3, 4, 5, 6]), + (1720359492.65181, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720149938.452021, [], [1, 2, 3, 4, 5, 6]), + (1720150083.518978, [], [1, 2, 3, 4, 5, 6]), + (1720150100.711862, [], [1, 2, 3, 4, 5, 6]), + (1720403516.136956, [], [1, 2, 3, 4, 5, 6]), + (1720403602.399166, [], [1, 2, 3, 4, 5, 6]), + (1720403688.061721, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720016151.530651, [], [1, 2, 3, 4, 5, 6]), + (1720126052.51206, [], [1, 2, 3, 4, 5, 6]), + (1720243360.967974, [], [1, 2, 3, 4, 5, 6]), + (1720567481.805169, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720259903.388453, [], [1, 2, 3, 4, 5, 6]), (1720495071.607118, [], [1, 2, 3, 4, 5, 6])], + [ + (1719978731.351246, [], [1, 2, 3, 4, 5, 6]), + (1720142275.008236, [], [1, 2, 3, 4, 5, 6]), + (1720225627.748133, [], [1, 2, 3, 4, 5, 6]), + (1720599835.060544, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720308817.017884, [], [1, 2, 3, 4, 5, 6]), (1720500376.721695, [], [1, 2, 3, 4, 5, 6])], + [ + (1720062080.162523, [], [1, 2, 3, 4, 5, 6]), + (1720424051.051867, [], [1, 2, 3, 4, 5, 6]), + (1720577193.657241, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720226358.301934, [], [1, 2, 3, 4, 5, 6]), (1720611516.599998, [], [1, 2, 3, 4, 5, 6])], + [(1720142831.087971, [], [1, 2, 3, 4, 5, 6]), (1720568727.59182, [], [1, 2, 3, 4, 5, 6])], + [(1720045127.801767, [], [1, 2, 3, 4, 5, 6])], + [(1720598254.557545, [], [1, 2, 3, 4, 5, 6])], + [(1720230498.737196, [], [1, 2, 3, 4, 5, 6]), (1720502519.921733, [], [1, 2, 3, 4, 5, 6])], + [(1720149819.132452, [], [1, 2, 3, 4, 5, 6]), (1720317818.669453, [], [1, 2, 3, 4, 5, 6])], + [(1719965630.184525, [], [1, 2, 3, 4, 5, 6]), (1720566194.006106, [], [1, 2, 3, 4, 5, 6])], + [(1719996710.23806, [], [1, 2, 3, 4, 5, 6])], + [(1720053587.04154, [], [1, 2, 3, 4, 5, 6]), (1720476400.319672, [], [1, 2, 3, 4, 5, 6])], + [(1720238998.499612, [], [1, 2, 3, 4, 5, 6])], + [(1720049964.339669, [], [1, 2, 3, 4, 5, 6]), (1720503256.459045, [], [1, 2, 3, 4, 5, 6])], + [(1720629914.75266, [], [1, 2, 
3, 4, 5, 6])], + [ + (1720067406.552276, [], [1, 2, 3, 4, 5, 6]), + (1720192823.078475, [], [1, 2, 3, 4, 5, 6]), + (1720615636.068682, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720050726.320669, [], [1, 2, 3, 4, 5, 6]), + (1720159164.117987, [], [1, 2, 3, 4, 5, 6]), + (1720583837.972687, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720470505.483407, [], [1, 2, 3, 4, 5, 6]), (1720589293.330858, [], [1, 2, 3, 4, 5, 6])], + [ + (1719990309.924021, [], [1, 2, 3, 4, 5, 6]), + (1720242385.881249, [], [1, 2, 3, 4, 5, 6]), + (1720648573.041044, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720059240.88106, [], [1, 2, 3, 4, 5, 6]), (1720486550.385795, [], [1, 2, 3, 4, 5, 6])], + [(1720232277.114726, [], [1, 2, 3, 4, 5, 6])], + [(1720156360.413945, [], [1, 2, 3, 4, 5, 6]), (1720415380.907597, [], [1, 2, 3, 4, 5, 6])], + [(1720143721.130937, [], [1, 2, 3, 4, 5, 6])], + [(1720093040.94431, [], [1, 2, 3, 4, 5, 6]), (1720230385.831757, [], [1, 2, 3, 4, 5, 6])], + [(1720313919.101562, [], [1, 2, 3, 4, 5, 6]), (1720600894.542752, [], [1, 2, 3, 4, 5, 6])], + [(1720008883.059792, [], [1, 2, 3, 4, 5, 6]), (1720151981.800615, [], [1, 2, 3, 4, 5, 6])], + [(1720583883.771582, [], [1, 2, 3, 4, 5, 6])], + [(1720054595.476172, [], [1, 2, 3, 4, 5, 6]), (1720494101.96425, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975987.869421, [], [1, 2, 3, 4, 5, 6]), + (1720072012.445937, [], [1, 2, 3, 4, 5, 6]), + (1720141541.892965, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974700.775073, [], [1, 2, 3, 4, 5, 6]), + (1719974810.784479, [], [1, 2, 3, 4, 5, 6]), + (1720022010.687673, [], [1, 2, 3, 4, 5, 6]), + (1720022125.15125, [], [1, 2, 3, 4, 5, 6]), + (1720022127.183082, [], [1, 2, 3, 4, 5, 6]), + (1720022127.850327, [], [1, 2, 3, 4, 5, 6]), + (1720022129.972437, [], [1, 2, 3, 4, 5, 6]), + (1720149346.519881, [], [1, 2, 3, 4, 5, 6]), + (1720149423.39517, [], [1, 2, 3, 4, 5, 6]), + (1720232515.945385, [], [1, 2, 3, 4, 5, 6]), + (1720232642.031851, [], [1, 2, 3, 4, 5, 6]), + (1720232644.151326, [], [1, 2, 3, 4, 5, 6]), + (1720232649.102724, [], [1, 2, 3, 4, 5, 6]), + (1720232651.213687, [], [1, 2, 3, 4, 5, 6]), + (1720291284.378849, [], [1, 2, 3, 4, 5, 6]), + (1720291307.651917, [], [1, 2, 3, 4, 5, 6]), + (1720317724.65539, [], [1, 2, 3, 4, 5, 6]), + (1720317962.176994, [], [1, 2, 3, 4, 5, 6]), + (1720317962.221761, [], [1, 2, 3, 4, 5, 6]), + (1720317967.870483, [], [1, 2, 3, 4, 5, 6]), + (1720416284.403485, [], [1, 2, 3, 4, 5, 6]), + (1720416286.45094, [], [1, 2, 3, 4, 5, 6]), + (1720446964.44037, [], [1, 2, 3, 4, 5, 6]), + (1720447111.491786, [], [1, 2, 3, 4, 5, 6]), + (1720447113.551591, [], [1, 2, 3, 4, 5, 6]), + (1720500857.609857, [], [1, 2, 3, 4, 5, 6]), + (1720500933.241251, [], [1, 2, 3, 4, 5, 6]), + (1720500935.342693, [], [1, 2, 3, 4, 5, 6]), + (1720550391.631024, [], [1, 2, 3, 4, 5, 6]), + (1720550393.677097, [], [1, 2, 3, 4, 5, 6]), + (1720571962.115275, [], [1, 2, 3, 4, 5, 6]), + (1720571964.156322, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720300973.659698, [], [1, 2, 3, 4, 5, 6]), (1720502088.420309, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226060.114355, [], [1, 2, 3, 4, 5, 6]), + (1720367668.242413, [], [1, 2, 3, 4, 5, 6]), + (1720580879.469873, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720148122.993839, [], [1, 2, 3, 4, 5, 6]), + (1720283848.988921, [], [1, 2, 3, 4, 5, 6]), + (1720392902.670008, [], [1, 2, 3, 4, 5, 6]), + (1720547569.939146, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720164561.277691, [], [1, 2, 3, 4, 5, 6]), (1720322322.293618, [], [1, 2, 3, 4, 5, 6])], + [(1720394391.029382, [], [1, 2, 3, 4, 5, 6])], + [(1720578227.91725, [], [1, 2, 3, 4, 5, 
6])], + [(1720427348.104988, [], [1, 2, 3, 4, 5, 6]), (1720586312.438776, [], [1, 2, 3, 4, 5, 6])], + [ + (1719967279.972433, [], [1, 2, 3, 4, 5, 6]), + (1719967282.055508, [], [1, 2, 3, 4, 5, 6]), + (1719986090.097845, [], [1, 2, 3, 4, 5, 6]), + (1719986173.00659, [], [1, 2, 3, 4, 5, 6]), + (1719986175.082864, [], [1, 2, 3, 4, 5, 6]), + (1720034526.875582, [], [1, 2, 3, 4, 5, 6]), + (1720061201.725715, [], [1, 2, 3, 4, 5, 6]), + (1720061294.240057, [], [1, 2, 3, 4, 5, 6]), + (1720061296.35589, [], [1, 2, 3, 4, 5, 6]), + (1720155141.396653, [], [1, 2, 3, 4, 5, 6]), + (1720155143.510508, [], [1, 2, 3, 4, 5, 6]), + (1720155145.301155, [], [1, 2, 3, 4, 5, 6]), + (1720155147.393972, [], [1, 2, 3, 4, 5, 6]), + (1720231098.024705, [], [1, 2, 3, 4, 5, 6]), + (1720231317.54759, [], [1, 2, 3, 4, 5, 6]), + (1720231319.611985, [], [1, 2, 3, 4, 5, 6]), + (1720271983.621164, [], [1, 2, 3, 4, 5, 6]), + (1720271985.710974, [], [1, 2, 3, 4, 5, 6]), + (1720316981.40392, [], [1, 2, 3, 4, 5, 6]), + (1720317019.941522, [], [1, 2, 3, 4, 5, 6]), + (1720317022.040965, [], [1, 2, 3, 4, 5, 6]), + (1720411936.226228, [], [1, 2, 3, 4, 5, 6]), + (1720411963.208146, [], [1, 2, 3, 4, 5, 6]), + (1720479757.589657, [], [1, 2, 3, 4, 5, 6]), + (1720479839.302922, [], [1, 2, 3, 4, 5, 6]), + (1720582109.835415, [], [1, 2, 3, 4, 5, 6]), + (1720582111.914294, [], [1, 2, 3, 4, 5, 6]), + (1720652093.707438, [], [1, 2, 3, 4, 5, 6]), + (1720652211.598303, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720579951.356488, [], [1, 2, 3, 4, 5, 6])], + [(1720593973.655643, [], [1, 2, 3, 4, 5, 6])], + [ + (1720061475.003195, [], [1, 2, 3, 4, 5, 6]), + (1720270392.101123, [], [1, 2, 3, 4, 5, 6]), + (1720415797.057544, [], [1, 2, 3, 4, 5, 6]), + (1720574029.592383, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719987865.032004, [], [1, 2, 3, 4, 5, 6]), (1720212776.214811, [], [1, 2, 3, 4, 5, 6])], + [(1720315089.869542, [], [1, 2, 3, 4, 5, 6]), (1720578088.622431, [], [1, 2, 3, 4, 5, 6])], + [(1720575422.335555, [], [1, 2, 3, 4, 5, 6])], + [(1720329438.482756, [], [1, 2, 3, 4, 5, 6]), (1720443842.432414, [], [1, 2, 3, 4, 5, 6])], + [ + (1720135846.308239, [], [1, 2, 3, 4, 5, 6]), + (1720221161.535587, [], [1, 2, 3, 4, 5, 6]), + (1720326226.738859, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719977789.721113, [], [1, 2, 3, 4, 5, 6]), + (1719977899.031956, [], [1, 2, 3, 4, 5, 6]), + (1719977901.119465, [], [1, 2, 3, 4, 5, 6]), + (1719982587.985388, [], [1, 2, 3, 4, 5, 6]), + (1719982666.211377, [], [1, 2, 3, 4, 5, 6]), + (1719982668.29279, [], [1, 2, 3, 4, 5, 6]), + (1719982672.56956, [], [1, 2, 3, 4, 5, 6]), + (1720063592.708606, [], [1, 2, 3, 4, 5, 6]), + (1720063594.776009, [], [1, 2, 3, 4, 5, 6]), + (1720145103.906614, [], [1, 2, 3, 4, 5, 6]), + (1720145165.665926, [], [1, 2, 3, 4, 5, 6]), + (1720157026.459569, [], [1, 2, 3, 4, 5, 6]), + (1720223512.011646, [], [1, 2, 3, 4, 5, 6]), + (1720223586.453989, [], [1, 2, 3, 4, 5, 6]), + (1720223588.535794, [], [1, 2, 3, 4, 5, 6]), + (1720329405.565358, [], [1, 2, 3, 4, 5, 6]), + (1720398313.307695, [], [1, 2, 3, 4, 5, 6]), + (1720398429.724071, [], [1, 2, 3, 4, 5, 6]), + (1720414381.775047, [], [1, 2, 3, 4, 5, 6]), + (1720446240.471098, [], [1, 2, 3, 4, 5, 6]), + (1720481889.793923, [], [1, 2, 3, 4, 5, 6]), + (1720481891.93036, [], [1, 2, 3, 4, 5, 6]), + (1720489136.015971, [], [1, 2, 3, 4, 5, 6]), + (1720489247.728734, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720318482.752639, [], [1, 2, 3, 4, 5, 6])], + [ + (1720242162.48487, [], [1, 2, 3, 4, 5, 6]), + (1720503535.294123, [], [1, 2, 3, 4, 5, 6]), + (1720590538.582039, [], [1, 
2, 3, 4, 5, 6]), + ], + [(1720148407.104121, [], [1, 2, 3, 4, 5, 6])], + [(1720154168.367205, [], [1, 2, 3, 4, 5, 6]), (1720568213.544423, [], [1, 2, 3, 4, 5, 6])], + [ + (1720069001.717509, [], [1, 2, 3, 4, 5, 6]), + (1720346135.538471, [], [1, 2, 3, 4, 5, 6]), + (1720489854.284499, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719967260.706099, [], [1, 2, 3, 4, 5, 6]), + (1720082538.484733, [], [1, 2, 3, 4, 5, 6]), + (1720240732.567635, [], [1, 2, 3, 4, 5, 6]), + (1720395713.187024, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719968640.279026, [], [1, 2, 3, 4, 5, 6]), + (1720058387.048155, [], [1, 2, 3, 4, 5, 6]), + (1720240163.514327, [], [1, 2, 3, 4, 5, 6]), + (1720391336.792179, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720310285.653948, [], [1, 2, 3, 4, 5, 6]), (1720589147.207321, [], [1, 2, 3, 4, 5, 6])], + [(1719973140.021275, [], [1, 2, 3, 4, 5, 6]), (1720504055.006021, [], [1, 2, 3, 4, 5, 6])], + [(1720115792.85023, [], [1, 2, 3, 4, 5, 6])], + [ + (1720140689.444004, [], [1, 2, 3, 4, 5, 6]), + (1720312169.980048, [], [1, 2, 3, 4, 5, 6]), + (1720399894.527727, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720316227.642169, [], [1, 2, 3, 4, 5, 6]), (1720484121.740556, [], [1, 2, 3, 4, 5, 6])], + [ + (1720150629.632571, [], [1, 2, 3, 4, 5, 6]), + (1720312593.72112, [], [1, 2, 3, 4, 5, 6]), + (1720584121.246833, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719966806.196729, [], [1, 2, 3, 4, 5, 6]), (1720492831.262792, [], [1, 2, 3, 4, 5, 6])], + [ + (1720069584.25825, [], [1, 2, 3, 4, 5, 6]), + (1720233172.76065, [], [1, 2, 3, 4, 5, 6]), + (1720317363.164219, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720227600.733956, [], [1, 2, 3, 4, 5, 6]), + (1720227600.784387, [], [1, 2, 3, 4, 5, 6]), + (1720227605.27419, [], [1, 2, 3, 4, 5, 6]), + (1720269710.791405, [], [1, 2, 3, 4, 5, 6]), + (1720269759.332462, [], [1, 2, 3, 4, 5, 6]), + (1720326344.424672, [], [1, 2, 3, 4, 5, 6]), + (1720369614.287387, [], [1, 2, 3, 4, 5, 6]), + (1720369719.27491, [], [1, 2, 3, 4, 5, 6]), + (1720369719.331218, [], [1, 2, 3, 4, 5, 6]), + (1720369721.899004, [], [1, 2, 3, 4, 5, 6]), + (1720385493.685201, [], [1, 2, 3, 4, 5, 6]), + (1720385551.219825, [], [1, 2, 3, 4, 5, 6]), + (1720385553.316418, [], [1, 2, 3, 4, 5, 6]), + (1720450115.39061, [], [1, 2, 3, 4, 5, 6]), + (1720450117.502598, [], [1, 2, 3, 4, 5, 6]), + (1720450118.78177, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719969800.978378, [], [1, 2, 3, 4, 5, 6]), + (1720222415.35262, [], [1, 2, 3, 4, 5, 6]), + (1720434706.74629, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720237451.24597, [], [1, 2, 3, 4, 5, 6]), (1720585495.150654, [], [1, 2, 3, 4, 5, 6])], + [(1719970937.04025, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983075.420902, [], [1, 2, 3, 4, 5, 6]), + (1720313367.078665, [], [1, 2, 3, 4, 5, 6]), + (1720413122.113225, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720155749.238687, [], [1, 2, 3, 4, 5, 6]), (1720491353.243799, [], [1, 2, 3, 4, 5, 6])], + [(1720060021.000595, [], [1, 2, 3, 4, 5, 6])], + [(1719988378.536367, [], [1, 2, 3, 4, 5, 6]), (1720228662.183092, [], [1, 2, 3, 4, 5, 6])], + [(1719981886.782157, [], [1, 2, 3, 4, 5, 6]), (1720156878.496962, [], [1, 2, 3, 4, 5, 6])], + [(1720582313.689559, [], [1, 2, 3, 4, 5, 6])], + [(1720232302.477057, [], [1, 2, 3, 4, 5, 6]), (1720493756.958556, [], [1, 2, 3, 4, 5, 6])], + [(1720295778.241704, [], [1, 2, 3, 4, 5, 6])], + [(1720021503.203052, [], [1, 2, 3, 4, 5, 6]), (1720325452.491454, [], [1, 2, 3, 4, 5, 6])], + [(1720527219.478404, [], [1, 2, 3, 4, 5, 6]), (1720567646.306507, [], [1, 2, 3, 4, 5, 6])], + [(1720223792.29193, [], [1, 2, 3, 4, 5, 6])], + [ + 
(1720006636.772706, [], [1, 2, 3, 4, 5, 6]), + (1720006795.60427, [], [1, 2, 3, 4, 5, 6]), + (1720006845.799981, [], [1, 2, 3, 4, 5, 6]), + (1720007022.741945, [], [1, 2, 3, 4, 5, 6]), + (1720007095.581047, [], [1, 2, 3, 4, 5, 6]), + (1720007134.850115, [], [1, 2, 3, 4, 5, 6]), + (1720025117.762503, [], [1, 2, 3, 4, 5, 6]), + (1720025308.512649, [], [1, 2, 3, 4, 5, 6]), + (1720025310.568037, [], [1, 2, 3, 4, 5, 6]), + (1720052547.163003, [], [1, 2, 3, 4, 5, 6]), + (1720052600.03312, [], [1, 2, 3, 4, 5, 6]), + (1720068046.902248, [], [1, 2, 3, 4, 5, 6]), + (1720068213.189912, [], [1, 2, 3, 4, 5, 6]), + (1720144711.311281, [], [1, 2, 3, 4, 5, 6]), + (1720144713.407177, [], [1, 2, 3, 4, 5, 6]), + (1720222638.332245, [], [1, 2, 3, 4, 5, 6]), + (1720222640.418838, [], [1, 2, 3, 4, 5, 6]), + (1720242141.813366, [], [1, 2, 3, 4, 5, 6]), + (1720242245.921587, [], [1, 2, 3, 4, 5, 6]), + (1720242248.011768, [], [1, 2, 3, 4, 5, 6]), + (1720333146.03005, [], [1, 2, 3, 4, 5, 6]), + (1720333287.562561, [], [1, 2, 3, 4, 5, 6]), + (1720333289.592652, [], [1, 2, 3, 4, 5, 6]), + (1720333292.319879, [], [1, 2, 3, 4, 5, 6]), + (1720333294.386109, [], [1, 2, 3, 4, 5, 6]), + (1720396984.211837, [], [1, 2, 3, 4, 5, 6]), + (1720397094.401782, [], [1, 2, 3, 4, 5, 6]), + (1720486134.144443, [], [1, 2, 3, 4, 5, 6]), + (1720486136.211044, [], [1, 2, 3, 4, 5, 6]), + (1720486140.873481, [], [1, 2, 3, 4, 5, 6]), + (1720486142.970428, [], [1, 2, 3, 4, 5, 6]), + (1720497754.706526, [], [1, 2, 3, 4, 5, 6]), + (1720497979.155047, [], [1, 2, 3, 4, 5, 6]), + (1720531991.462042, [], [1, 2, 3, 4, 5, 6]), + (1720532199.030662, [], [1, 2, 3, 4, 5, 6]), + (1720588796.771517, [], [1, 2, 3, 4, 5, 6]), + (1720588842.077879, [], [1, 2, 3, 4, 5, 6]), + (1720588844.116306, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720239926.764196, [], [1, 2, 3, 4, 5, 6])], + [(1720395045.1902, [], [1, 2, 3, 4, 5, 6])], + [(1720431147.297621, [], [1, 2, 3, 4, 5, 6])], + [(1720240748.713179, [], [1, 2, 3, 4, 5, 6])], + [(1719972432.742571, [], [1, 2, 3, 4, 5, 6])], + [(1720410198.607466, [], [1, 2, 3, 4, 5, 6]), (1720566548.549011, [], [1, 2, 3, 4, 5, 6])], + [(1720455428.865155, [], [1, 2, 3, 4, 5, 6])], + [(1720498325.755933, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983684.033908, [], [1, 2, 3, 4, 5, 6]), + (1720319741.991515, [], [1, 2, 3, 4, 5, 6]), + (1720414800.645761, [], [1, 2, 3, 4, 5, 6]), + (1720484979.12583, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720134283.600008, [], [1, 2, 3, 4, 5, 6])], + [(1720409485.01654, [], [1, 2, 3, 4, 5, 6])], + [ + (1719981074.661088, [], [1, 2, 3, 4, 5, 6]), + (1720143880.41593, [], [1, 2, 3, 4, 5, 6]), + (1720229983.175788, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060903.203334, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968419.743912, [], [1, 2, 3, 4, 5, 6]), + (1719968490.438903, [], [1, 2, 3, 4, 5, 6]), + (1719968620.857174, [], [1, 2, 3, 4, 5, 6]), + (1720498112.351156, [], [1, 2, 3, 4, 5, 6]), + (1720498358.36836, [], [1, 2, 3, 4, 5, 6]), + (1720498468.250047, [], [1, 2, 3, 4, 5, 6]), + (1720574778.111823, [], [1, 2, 3, 4, 5, 6]), + (1720574806.5479, [], [1, 2, 3, 4, 5, 6]), + (1720574917.425735, [], [1, 2, 3, 4, 5, 6]), + (1720574933.603291, [], [1, 2, 3, 4, 5, 6]), + (1720575020.164914, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720406407.483318, [], [1, 2, 3, 4, 5, 6])], + [(1720412099.352018, [], [1, 2, 3, 4, 5, 6]), (1720498223.084881, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979024.598321, [], [1, 2, 3, 4, 5, 6]), + (1720114791.984992, [], [1, 2, 3, 4, 5, 6]), + (1720241390.157269, [], [1, 2, 3, 4, 5, 6]), + (1720500283.345509, [], [1, 
2, 3, 4, 5, 6]), + ], + [(1720569522.382331, [], [1, 2, 3, 4, 5, 6])], + [(1720146840.111051, [], [1, 2, 3, 4, 5, 6]), (1720287268.372643, [], [1, 2, 3, 4, 5, 6])], + [(1720583508.926048, [], [1, 2, 3, 4, 5, 6])], + [ + (1720229146.528014, [], [1, 2, 3, 4, 5, 6]), + (1720229340.131801, [], [1, 2, 3, 4, 5, 6]), + (1720229424.480475, [], [1, 2, 3, 4, 5, 6]), + (1720229565.859999, [], [1, 2, 3, 4, 5, 6]), + (1720229567.783491, [], [1, 2, 3, 4, 5, 6]), + (1720229693.297904, [], [1, 2, 3, 4, 5, 6]), + (1720229755.453165, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720098291.146186, [], [1, 2, 3, 4, 5, 6])], + [(1719983104.788269, [], [1, 2, 3, 4, 5, 6]), (1720070626.816099, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226820.995006, [], [1, 2, 3, 4, 5, 6]), + (1720316639.892049, [], [1, 2, 3, 4, 5, 6]), + (1720589368.875624, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720070877.576019, [], [1, 2, 3, 4, 5, 6]), (1720234703.959519, [], [1, 2, 3, 4, 5, 6])], + [ + (1719978870.060134, [], [1, 2, 3, 4, 5, 6]), + (1720137971.413991, [], [1, 2, 3, 4, 5, 6]), + (1720491059.303159, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719995043.481796, [], [1, 2, 3, 4, 5, 6]), (1720148819.805573, [], [1, 2, 3, 4, 5, 6])], + [ + (1720052005.359573, [], [1, 2, 3, 4, 5, 6]), + (1720052127.565063, [], [1, 2, 3, 4, 5, 6]), + (1720052129.679258, [], [1, 2, 3, 4, 5, 6]), + (1720064915.795875, [], [1, 2, 3, 4, 5, 6]), + (1720064917.840723, [], [1, 2, 3, 4, 5, 6]), + (1720064919.250429, [], [1, 2, 3, 4, 5, 6]), + (1720140563.359858, [], [1, 2, 3, 4, 5, 6]), + (1720140686.221967, [], [1, 2, 3, 4, 5, 6]), + (1720147133.126896, [], [1, 2, 3, 4, 5, 6]), + (1720154606.237768, [], [1, 2, 3, 4, 5, 6]), + (1720208312.107821, [], [1, 2, 3, 4, 5, 6]), + (1720208397.77235, [], [1, 2, 3, 4, 5, 6]), + (1720208399.88578, [], [1, 2, 3, 4, 5, 6]), + (1720226692.740751, [], [1, 2, 3, 4, 5, 6]), + (1720226809.874422, [], [1, 2, 3, 4, 5, 6]), + (1720226811.929607, [], [1, 2, 3, 4, 5, 6]), + (1720320735.680282, [], [1, 2, 3, 4, 5, 6]), + (1720320737.781583, [], [1, 2, 3, 4, 5, 6]), + (1720394544.101953, [], [1, 2, 3, 4, 5, 6]), + (1720394546.228449, [], [1, 2, 3, 4, 5, 6]), + (1720411628.159882, [], [1, 2, 3, 4, 5, 6]), + (1720411765.678009, [], [1, 2, 3, 4, 5, 6]), + (1720411765.737071, [], [1, 2, 3, 4, 5, 6]), + (1720411771.063593, [], [1, 2, 3, 4, 5, 6]), + (1720493021.815332, [], [1, 2, 3, 4, 5, 6]), + (1720493023.89141, [], [1, 2, 3, 4, 5, 6]), + (1720547092.818141, [], [1, 2, 3, 4, 5, 6]), + (1720547133.337079, [], [1, 2, 3, 4, 5, 6]), + (1720566405.934125, [], [1, 2, 3, 4, 5, 6]), + (1720566407.979963, [], [1, 2, 3, 4, 5, 6]), + (1720592934.864349, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720407281.391395, [], [1, 2, 3, 4, 5, 6]), (1720578489.911262, [], [1, 2, 3, 4, 5, 6])], + [(1720393905.799101, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965431.440319, [], [1, 2, 3, 4, 5, 6]), + (1719965547.770505, [], [1, 2, 3, 4, 5, 6]), + (1719965549.880668, [], [1, 2, 3, 4, 5, 6]), + (1720010279.644796, [], [1, 2, 3, 4, 5, 6]), + (1720010481.117481, [], [1, 2, 3, 4, 5, 6]), + (1720060297.061777, [], [1, 2, 3, 4, 5, 6]), + (1720060299.106745, [], [1, 2, 3, 4, 5, 6]), + (1720072795.790373, [], [1, 2, 3, 4, 5, 6]), + (1720072933.175213, [], [1, 2, 3, 4, 5, 6]), + (1720138923.382269, [], [1, 2, 3, 4, 5, 6]), + (1720138952.892452, [], [1, 2, 3, 4, 5, 6]), + (1720138954.952138, [], [1, 2, 3, 4, 5, 6]), + (1720243737.055635, [], [1, 2, 3, 4, 5, 6]), + (1720243742.725476, [], [1, 2, 3, 4, 5, 6]), + (1720243744.812736, [], [1, 2, 3, 4, 5, 6]), + (1720278868.092914, [], [1, 2, 3, 4, 5, 6]), + 
(1720278981.120539, [], [1, 2, 3, 4, 5, 6]), + (1720278983.221413, [], [1, 2, 3, 4, 5, 6]), + (1720312851.319112, [], [1, 2, 3, 4, 5, 6]), + (1720312961.59678, [], [1, 2, 3, 4, 5, 6]), + (1720312963.701002, [], [1, 2, 3, 4, 5, 6]), + (1720401167.589016, [], [1, 2, 3, 4, 5, 6]), + (1720401192.232905, [], [1, 2, 3, 4, 5, 6]), + (1720488671.153932, [], [1, 2, 3, 4, 5, 6]), + (1720488673.262556, [], [1, 2, 3, 4, 5, 6]), + (1720498148.914747, [], [1, 2, 3, 4, 5, 6]), + (1720498151.01221, [], [1, 2, 3, 4, 5, 6]), + (1720585902.00157, [], [1, 2, 3, 4, 5, 6]), + (1720585904.068243, [], [1, 2, 3, 4, 5, 6]), + (1720627230.183177, [], [1, 2, 3, 4, 5, 6]), + (1720627251.343451, [], [1, 2, 3, 4, 5, 6]), + (1720627253.395817, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720049050.269443, [], [1, 2, 3, 4, 5, 6])], + [(1720406587.77676, [], [1, 2, 3, 4, 5, 6])], + [ + (1720201120.604139, [], [1, 2, 3, 4, 5, 6]), + (1720237348.670203, [], [1, 2, 3, 4, 5, 6]), + (1720503188.882528, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720137961.069782, [], [1, 2, 3, 4, 5, 6]), (1720233994.333193, [], [1, 2, 3, 4, 5, 6])], + [(1720148673.115174, [], [1, 2, 3, 4, 5, 6])], + [ + (1720017472.013793, [], [1, 2, 3, 4, 5, 6]), + (1720238395.438066, [], [1, 2, 3, 4, 5, 6]), + (1720481118.520931, [], [1, 2, 3, 4, 5, 6]), + (1720624077.141735, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720460985.277631, [], [1, 2, 3, 4, 5, 6])], + [(1720065960.10824, [], [1, 2, 3, 4, 5, 6]), (1720568092.250721, [], [1, 2, 3, 4, 5, 6])], + [(1719964803.220143, [], [1, 2, 3, 4, 5, 6]), (1720072690.78503, [], [1, 2, 3, 4, 5, 6])], + [(1719973118.028284, [], [1, 2, 3, 4, 5, 6])], + [(1720148963.270876, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055800.056897, [], [1, 2, 3, 4, 5, 6]), + (1720147428.656208, [], [1, 2, 3, 4, 5, 6]), + (1720237025.123131, [], [1, 2, 3, 4, 5, 6]), + (1720326902.440989, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719967443.988967, [], [1, 2, 3, 4, 5, 6]), + (1719967507.96168, [], [1, 2, 3, 4, 5, 6]), + (1719967510.01003, [], [1, 2, 3, 4, 5, 6]), + (1719967510.547419, [], [1, 2, 3, 4, 5, 6]), + (1719967512.630086, [], [1, 2, 3, 4, 5, 6]), + (1720138300.130825, [], [1, 2, 3, 4, 5, 6]), + (1720138300.228601, [], [1, 2, 3, 4, 5, 6]), + (1720138302.137449, [], [1, 2, 3, 4, 5, 6]), + (1720266243.760636, [], [1, 2, 3, 4, 5, 6]), + (1720266262.29505, [], [1, 2, 3, 4, 5, 6]), + (1720266262.382243, [], [1, 2, 3, 4, 5, 6]), + (1720266267.714044, [], [1, 2, 3, 4, 5, 6]), + (1720376066.44502, [], [1, 2, 3, 4, 5, 6]), + (1720376075.005446, [], [1, 2, 3, 4, 5, 6]), + (1720376075.055395, [], [1, 2, 3, 4, 5, 6]), + (1720376078.271297, [], [1, 2, 3, 4, 5, 6]), + (1720495615.317205, [], [1, 2, 3, 4, 5, 6]), + (1720495625.121167, [], [1, 2, 3, 4, 5, 6]), + (1720495627.190587, [], [1, 2, 3, 4, 5, 6]), + (1720495631.668389, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720094115.933922, [], [1, 2, 3, 4, 5, 6])], + [(1720327035.126978, [], [1, 2, 3, 4, 5, 6]), (1720652576.382878, [], [1, 2, 3, 4, 5, 6])], + [(1720494001.577927, [], [1, 2, 3, 4, 5, 6])], + [ + (1720102356.301353, [], [1, 2, 3, 4, 5, 6]), + (1720244955.2084, [], [1, 2, 3, 4, 5, 6]), + (1720393949.41044, [], [1, 2, 3, 4, 5, 6]), + (1720576986.579566, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720033048.609785, [], [1, 2, 3, 4, 5, 6])], + [(1720402968.773862, [], [1, 2, 3, 4, 5, 6])], + [(1720230211.716966, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059787.289306, [], [1, 2, 3, 4, 5, 6]), + (1720397378.208597, [], [1, 2, 3, 4, 5, 6]), + (1720481196.422422, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720003149.057462, [], [1, 2, 3, 4, 5, 6]), + 
(1720003372.883061, [], [1, 2, 3, 4, 5, 6]), + (1720003374.955179, [], [1, 2, 3, 4, 5, 6]), + (1720039541.861276, [], [1, 2, 3, 4, 5, 6]), + (1720039688.427965, [], [1, 2, 3, 4, 5, 6]), + (1720039690.485555, [], [1, 2, 3, 4, 5, 6]), + (1720048625.116329, [], [1, 2, 3, 4, 5, 6]), + (1720048725.117697, [], [1, 2, 3, 4, 5, 6]), + (1720141659.610639, [], [1, 2, 3, 4, 5, 6]), + (1720141661.665952, [], [1, 2, 3, 4, 5, 6]), + (1720196426.042225, [], [1, 2, 3, 4, 5, 6]), + (1720196487.09087, [], [1, 2, 3, 4, 5, 6]), + (1720196489.183893, [], [1, 2, 3, 4, 5, 6]), + (1720207066.952798, [], [1, 2, 3, 4, 5, 6]), + (1720207237.857105, [], [1, 2, 3, 4, 5, 6]), + (1720207239.919375, [], [1, 2, 3, 4, 5, 6]), + (1720271033.503072, [], [1, 2, 3, 4, 5, 6]), + (1720271035.587795, [], [1, 2, 3, 4, 5, 6]), + (1720583123.471438, [], [1, 2, 3, 4, 5, 6]), + (1720583125.576798, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720154856.626343, [], [1, 2, 3, 4, 5, 6]), + (1720226072.346309, [], [1, 2, 3, 4, 5, 6]), + (1720310601.449016, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720242303.35601, [], [1, 2, 3, 4, 5, 6])], + [(1720078404.748142, [], [1, 2, 3, 4, 5, 6]), (1720147584.809447, [], [1, 2, 3, 4, 5, 6])], + [(1720178488.289574, [], [1, 2, 3, 4, 5, 6]), (1720306985.894457, [], [1, 2, 3, 4, 5, 6])], + [(1720146748.830901, [], [1, 2, 3, 4, 5, 6]), (1720406666.368212, [], [1, 2, 3, 4, 5, 6])], + [ + (1720023013.684634, [], [1, 2, 3, 4, 5, 6]), + (1720091577.184398, [], [1, 2, 3, 4, 5, 6]), + (1720415121.299085, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720056649.932043, [], [1, 2, 3, 4, 5, 6])], + [ + (1720323285.584787, [], [1, 2, 3, 4, 5, 6]), + (1720415154.592994, [], [1, 2, 3, 4, 5, 6]), + (1720437978.9498, [], [1, 2, 3, 4, 5, 6]), + (1720473849.744602, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719984649.0919, [], [1, 2, 3, 4, 5, 6]), + (1719984690.14033, [], [1, 2, 3, 4, 5, 6]), + (1720049900.611104, [], [1, 2, 3, 4, 5, 6]), + (1720049902.719326, [], [1, 2, 3, 4, 5, 6]), + (1720070202.827545, [], [1, 2, 3, 4, 5, 6]), + (1720070219.8164, [], [1, 2, 3, 4, 5, 6]), + (1720070221.892056, [], [1, 2, 3, 4, 5, 6]), + (1720137406.074377, [], [1, 2, 3, 4, 5, 6]), + (1720137495.69452, [], [1, 2, 3, 4, 5, 6]), + (1720137497.797134, [], [1, 2, 3, 4, 5, 6]), + (1720328661.139393, [], [1, 2, 3, 4, 5, 6]), + (1720328739.939669, [], [1, 2, 3, 4, 5, 6]), + (1720498901.295947, [], [1, 2, 3, 4, 5, 6]), + (1720570987.624349, [], [1, 2, 3, 4, 5, 6]), + (1720602646.235039, [], [1, 2, 3, 4, 5, 6]), + (1720602812.376711, [], [1, 2, 3, 4, 5, 6]), + (1720602814.446349, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974528.289353, [], [1, 2, 3, 4, 5, 6]), + (1720395723.591687, [], [1, 2, 3, 4, 5, 6]), + (1720617442.004095, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719975592.337919, [], [1, 2, 3, 4, 5, 6])], + [(1720057591.780745, [], [1, 2, 3, 4, 5, 6]), (1720488152.255523, [], [1, 2, 3, 4, 5, 6])], + [ + (1720148926.955422, [], [1, 2, 3, 4, 5, 6]), + (1720232410.538746, [], [1, 2, 3, 4, 5, 6]), + (1720408447.752538, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720242892.942203, [], [1, 2, 3, 4, 5, 6]), (1720322823.10199, [], [1, 2, 3, 4, 5, 6])], + [(1720400088.852099, [], [1, 2, 3, 4, 5, 6]), (1720652752.741385, [], [1, 2, 3, 4, 5, 6])], + [(1720412428.936544, [], [1, 2, 3, 4, 5, 6]), (1720574790.414977, [], [1, 2, 3, 4, 5, 6])], + [(1720069130.346249, [], [1, 2, 3, 4, 5, 6])], + [ + (1720157382.402748, [], [1, 2, 3, 4, 5, 6]), + (1720157476.064866, [], [1, 2, 3, 4, 5, 6]), + (1720157570.728611, [], [1, 2, 3, 4, 5, 6]), + (1720157662.381552, [], [1, 2, 3, 4, 5, 6]), + 
(1720168580.805279, [], [1, 2, 3, 4, 5, 6]), + (1720168760.793692, [], [1, 2, 3, 4, 5, 6]), + (1720230288.646191, [], [1, 2, 3, 4, 5, 6]), + (1720230430.103977, [], [1, 2, 3, 4, 5, 6]), + (1720243393.667459, [], [1, 2, 3, 4, 5, 6]), + (1720311604.919662, [], [1, 2, 3, 4, 5, 6]), + (1720311771.258364, [], [1, 2, 3, 4, 5, 6]), + (1720311773.310317, [], [1, 2, 3, 4, 5, 6]), + (1720324823.664232, [], [1, 2, 3, 4, 5, 6]), + (1720324825.721978, [], [1, 2, 3, 4, 5, 6]), + (1720393374.344255, [], [1, 2, 3, 4, 5, 6]), + (1720410680.226051, [], [1, 2, 3, 4, 5, 6]), + (1720410682.252163, [], [1, 2, 3, 4, 5, 6]), + (1720498394.961086, [], [1, 2, 3, 4, 5, 6]), + (1720498440.73496, [], [1, 2, 3, 4, 5, 6]), + (1720569716.948564, [], [1, 2, 3, 4, 5, 6]), + (1720569812.275586, [], [1, 2, 3, 4, 5, 6]), + (1720569814.333894, [], [1, 2, 3, 4, 5, 6]), + (1720569814.53133, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719978988.407662, [], [1, 2, 3, 4, 5, 6]), + (1719978990.511958, [], [1, 2, 3, 4, 5, 6]), + (1719984066.593696, [], [1, 2, 3, 4, 5, 6]), + (1719984323.803693, [], [1, 2, 3, 4, 5, 6]), + (1719984325.863773, [], [1, 2, 3, 4, 5, 6]), + (1720152078.595081, [], [1, 2, 3, 4, 5, 6]), + (1720225565.930941, [], [1, 2, 3, 4, 5, 6]), + (1720280378.036955, [], [1, 2, 3, 4, 5, 6]), + (1720326955.218979, [], [1, 2, 3, 4, 5, 6]), + (1720326957.292481, [], [1, 2, 3, 4, 5, 6]), + (1720331017.031137, [], [1, 2, 3, 4, 5, 6]), + (1720331019.121504, [], [1, 2, 3, 4, 5, 6]), + (1720410756.685202, [], [1, 2, 3, 4, 5, 6]), + (1720410758.796614, [], [1, 2, 3, 4, 5, 6]), + (1720417802.859919, [], [1, 2, 3, 4, 5, 6]), + (1720417804.989442, [], [1, 2, 3, 4, 5, 6]), + (1720487313.059882, [], [1, 2, 3, 4, 5, 6]), + (1720487498.533155, [], [1, 2, 3, 4, 5, 6]), + (1720487500.652063, [], [1, 2, 3, 4, 5, 6]), + (1720487503.708405, [], [1, 2, 3, 4, 5, 6]), + (1720487505.805861, [], [1, 2, 3, 4, 5, 6]), + (1720501546.266299, [], [1, 2, 3, 4, 5, 6]), + (1720501655.51812, [], [1, 2, 3, 4, 5, 6]), + (1720575741.153236, [], [1, 2, 3, 4, 5, 6]), + (1720575891.79104, [], [1, 2, 3, 4, 5, 6]), + (1720589520.786652, [], [1, 2, 3, 4, 5, 6]), + (1720589642.390304, [], [1, 2, 3, 4, 5, 6]), + (1720589644.452771, [], [1, 2, 3, 4, 5, 6]), + (1720615233.591986, [], [1, 2, 3, 4, 5, 6]), + (1720615337.890481, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719971542.018584, [], [1, 2, 3, 4, 5, 6]), + (1719971788.12398, [], [1, 2, 3, 4, 5, 6]), + (1720117268.913487, [], [1, 2, 3, 4, 5, 6]), + (1720117270.991206, [], [1, 2, 3, 4, 5, 6]), + (1720143198.612101, [], [1, 2, 3, 4, 5, 6]), + (1720143429.084839, [], [1, 2, 3, 4, 5, 6]), + (1720238625.308642, [], [1, 2, 3, 4, 5, 6]), + (1720238757.81922, [], [1, 2, 3, 4, 5, 6]), + (1720238759.894117, [], [1, 2, 3, 4, 5, 6]), + (1720330550.917977, [], [1, 2, 3, 4, 5, 6]), + (1720330626.459228, [], [1, 2, 3, 4, 5, 6]), + (1720378937.313156, [], [1, 2, 3, 4, 5, 6]), + (1720379038.375789, [], [1, 2, 3, 4, 5, 6]), + (1720386358.224787, [], [1, 2, 3, 4, 5, 6]), + (1720386360.275601, [], [1, 2, 3, 4, 5, 6]), + (1720416486.117358, [], [1, 2, 3, 4, 5, 6]), + (1720416608.109114, [], [1, 2, 3, 4, 5, 6]), + (1720493716.833205, [], [1, 2, 3, 4, 5, 6]), + (1720493844.641363, [], [1, 2, 3, 4, 5, 6]), + (1720493846.67691, [], [1, 2, 3, 4, 5, 6]), + (1720568118.486107, [], [1, 2, 3, 4, 5, 6]), + (1720568219.230995, [], [1, 2, 3, 4, 5, 6]), + (1720568221.334344, [], [1, 2, 3, 4, 5, 6]), + (1720574746.351324, [], [1, 2, 3, 4, 5, 6]), + (1720574815.297689, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720233195.120615, [], [1, 2, 3, 4, 5, 6]), + 
(1720393731.964556, [], [1, 2, 3, 4, 5, 6]), + (1720570257.699261, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720149655.238203, [], [1, 2, 3, 4, 5, 6]), + (1720352361.227124, [], [1, 2, 3, 4, 5, 6]), + (1720578697.147852, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720411277.985506, [], [1, 2, 3, 4, 5, 6]), (1720572981.673421, [], [1, 2, 3, 4, 5, 6])], + [(1720569584.93865, [], [1, 2, 3, 4, 5, 6])], + [(1720311303.894177, [], [1, 2, 3, 4, 5, 6])], + [(1720576463.87807, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982989.782732, [], [1, 2, 3, 4, 5, 6]), + (1720080708.007665, [], [1, 2, 3, 4, 5, 6]), + (1720234553.333259, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719983711.203136, [], [1, 2, 3, 4, 5, 6]), (1720395076.590109, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968905.802345, [], [1, 2, 3, 4, 5, 6]), + (1720054751.228152, [], [1, 2, 3, 4, 5, 6]), + (1720393228.571573, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720151965.57438, [], [1, 2, 3, 4, 5, 6]), + (1720265917.280767, [], [1, 2, 3, 4, 5, 6]), + (1720414597.498797, [], [1, 2, 3, 4, 5, 6]), + (1720569352.211054, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720139623.448558, [], [1, 2, 3, 4, 5, 6]), (1720413909.371495, [], [1, 2, 3, 4, 5, 6])], + [ + (1720332156.972433, [], [1, 2, 3, 4, 5, 6]), + (1720486770.808084, [], [1, 2, 3, 4, 5, 6]), + (1720570506.129092, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720046377.309817, [], [1, 2, 3, 4, 5, 6]), (1720144405.103977, [], [1, 2, 3, 4, 5, 6])], + [(1720311749.460887, [], [1, 2, 3, 4, 5, 6]), (1720480404.801297, [], [1, 2, 3, 4, 5, 6])], + [(1719965504.779832, [], [1, 2, 3, 4, 5, 6])], + [(1720489219.425884, [], [1, 2, 3, 4, 5, 6])], + [(1720480774.306063, [], [1, 2, 3, 4, 5, 6])], + [(1719968486.759348, [], [1, 2, 3, 4, 5, 6]), (1720229505.650825, [], [1, 2, 3, 4, 5, 6])], + [(1720122526.844622, [], [1, 2, 3, 4, 5, 6]), (1720142320.524851, [], [1, 2, 3, 4, 5, 6])], + [(1720320995.267452, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984296.990609, [], [1, 2, 3, 4, 5, 6]), + (1719984350.22369, [], [1, 2, 3, 4, 5, 6]), + (1719984352.257572, [], [1, 2, 3, 4, 5, 6]), + (1720052997.807277, [], [1, 2, 3, 4, 5, 6]), + (1720053049.304507, [], [1, 2, 3, 4, 5, 6]), + (1720053051.378602, [], [1, 2, 3, 4, 5, 6]), + (1720150389.546489, [], [1, 2, 3, 4, 5, 6]), + (1720150391.660121, [], [1, 2, 3, 4, 5, 6]), + (1720168392.833864, [], [1, 2, 3, 4, 5, 6]), + (1720168394.931787, [], [1, 2, 3, 4, 5, 6]), + (1720218901.13549, [], [1, 2, 3, 4, 5, 6]), + (1720218984.364651, [], [1, 2, 3, 4, 5, 6]), + (1720218986.444335, [], [1, 2, 3, 4, 5, 6]), + (1720236871.413173, [], [1, 2, 3, 4, 5, 6]), + (1720236873.52, [], [1, 2, 3, 4, 5, 6]), + (1720318899.245869, [], [1, 2, 3, 4, 5, 6]), + (1720319077.080816, [], [1, 2, 3, 4, 5, 6]), + (1720319079.181328, [], [1, 2, 3, 4, 5, 6]), + (1720421585.966107, [], [1, 2, 3, 4, 5, 6]), + (1720421692.177002, [], [1, 2, 3, 4, 5, 6]), + (1720421694.269891, [], [1, 2, 3, 4, 5, 6]), + (1720484559.101295, [], [1, 2, 3, 4, 5, 6]), + (1720484634.686657, [], [1, 2, 3, 4, 5, 6]), + (1720484636.791229, [], [1, 2, 3, 4, 5, 6]), + (1720484640.876498, [], [1, 2, 3, 4, 5, 6]), + (1720484642.914839, [], [1, 2, 3, 4, 5, 6]), + (1720568978.955929, [], [1, 2, 3, 4, 5, 6]), + (1720569083.551067, [], [1, 2, 3, 4, 5, 6]), + (1720603472.634189, [], [1, 2, 3, 4, 5, 6]), + (1720603622.57534, [], [1, 2, 3, 4, 5, 6]), + (1720603624.69381, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720204831.715905, [], [1, 2, 3, 4, 5, 6])], + [(1719985836.773241, [], [1, 2, 3, 4, 5, 6]), (1720227063.151216, [], [1, 2, 3, 4, 5, 6])], + [(1720226998.434162, [], [1, 2, 3, 4, 5, 6])], + 
[(1720413326.470543, [], [1, 2, 3, 4, 5, 6])], + [(1720484747.542658, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975290.47648, [], [1, 2, 3, 4, 5, 6]), + (1719975421.539502, [], [1, 2, 3, 4, 5, 6]), + (1719975423.609947, [], [1, 2, 3, 4, 5, 6]), + (1719975424.130571, [], [1, 2, 3, 4, 5, 6]), + (1719975426.224942, [], [1, 2, 3, 4, 5, 6]), + (1720024823.782769, [], [1, 2, 3, 4, 5, 6]), + (1720024943.32157, [], [1, 2, 3, 4, 5, 6]), + (1720024945.350078, [], [1, 2, 3, 4, 5, 6]), + (1720024945.905366, [], [1, 2, 3, 4, 5, 6]), + (1720056619.663053, [], [1, 2, 3, 4, 5, 6]), + (1720056822.318399, [], [1, 2, 3, 4, 5, 6]), + (1720056824.36183, [], [1, 2, 3, 4, 5, 6]), + (1720072281.168332, [], [1, 2, 3, 4, 5, 6]), + (1720072283.222603, [], [1, 2, 3, 4, 5, 6]), + (1720134154.949613, [], [1, 2, 3, 4, 5, 6]), + (1720134376.323715, [], [1, 2, 3, 4, 5, 6]), + (1720134378.416906, [], [1, 2, 3, 4, 5, 6]), + (1720140112.874786, [], [1, 2, 3, 4, 5, 6]), + (1720140131.322854, [], [1, 2, 3, 4, 5, 6]), + (1720140133.38169, [], [1, 2, 3, 4, 5, 6]), + (1720238635.597737, [], [1, 2, 3, 4, 5, 6]), + (1720238637.672121, [], [1, 2, 3, 4, 5, 6]), + (1720418306.625113, [], [1, 2, 3, 4, 5, 6]), + (1720418333.5673, [], [1, 2, 3, 4, 5, 6]), + (1720487528.439985, [], [1, 2, 3, 4, 5, 6]), + (1720487546.337876, [], [1, 2, 3, 4, 5, 6]), + (1720487548.449392, [], [1, 2, 3, 4, 5, 6]), + (1720502509.125496, [], [1, 2, 3, 4, 5, 6]), + (1720502624.411704, [], [1, 2, 3, 4, 5, 6]), + (1720585053.028856, [], [1, 2, 3, 4, 5, 6]), + (1720585055.08891, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719982052.592392, [], [1, 2, 3, 4, 5, 6]), + (1719982258.203523, [], [1, 2, 3, 4, 5, 6]), + (1719985183.079021, [], [1, 2, 3, 4, 5, 6]), + (1719985305.780432, [], [1, 2, 3, 4, 5, 6]), + (1720055777.47168, [], [1, 2, 3, 4, 5, 6]), + (1720071284.174477, [], [1, 2, 3, 4, 5, 6]), + (1720152558.44266, [], [1, 2, 3, 4, 5, 6]), + (1720152671.333552, [], [1, 2, 3, 4, 5, 6]), + (1720152673.415669, [], [1, 2, 3, 4, 5, 6]), + (1720182134.94743, [], [1, 2, 3, 4, 5, 6]), + (1720182137.028427, [], [1, 2, 3, 4, 5, 6]), + (1720182138.842693, [], [1, 2, 3, 4, 5, 6]), + (1720182140.958073, [], [1, 2, 3, 4, 5, 6]), + (1720227575.333539, [], [1, 2, 3, 4, 5, 6]), + (1720227736.260264, [], [1, 2, 3, 4, 5, 6]), + (1720227738.300477, [], [1, 2, 3, 4, 5, 6]), + (1720236068.538351, [], [1, 2, 3, 4, 5, 6]), + (1720236070.60483, [], [1, 2, 3, 4, 5, 6]), + (1720317757.071545, [], [1, 2, 3, 4, 5, 6]), + (1720317866.130343, [], [1, 2, 3, 4, 5, 6]), + (1720317868.22788, [], [1, 2, 3, 4, 5, 6]), + (1720330462.980036, [], [1, 2, 3, 4, 5, 6]), + (1720330707.655663, [], [1, 2, 3, 4, 5, 6]), + (1720330709.750072, [], [1, 2, 3, 4, 5, 6]), + (1720373940.747523, [], [1, 2, 3, 4, 5, 6]), + (1720374027.697475, [], [1, 2, 3, 4, 5, 6]), + (1720374029.799405, [], [1, 2, 3, 4, 5, 6]), + (1720406670.90306, [], [1, 2, 3, 4, 5, 6]), + (1720406844.478936, [], [1, 2, 3, 4, 5, 6]), + (1720406846.592556, [], [1, 2, 3, 4, 5, 6]), + (1720406851.281701, [], [1, 2, 3, 4, 5, 6]), + (1720412576.66958, [], [1, 2, 3, 4, 5, 6]), + (1720412652.412012, [], [1, 2, 3, 4, 5, 6]), + (1720412654.533239, [], [1, 2, 3, 4, 5, 6]), + (1720486184.910769, [], [1, 2, 3, 4, 5, 6]), + (1720571066.49819, [], [1, 2, 3, 4, 5, 6]), + (1720626129.788949, [], [1, 2, 3, 4, 5, 6]), + (1720626261.332132, [], [1, 2, 3, 4, 5, 6]), + (1720626263.446226, [], [1, 2, 3, 4, 5, 6]), + (1720626267.27108, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719972691.157799, [], [1, 2, 3, 4, 5, 6]), + (1719972822.458675, [], [1, 2, 3, 4, 5, 6]), + (1719972824.589752, 
[], [1, 2, 3, 4, 5, 6]), + (1719993329.829737, [], [1, 2, 3, 4, 5, 6]), + (1720053155.834123, [], [1, 2, 3, 4, 5, 6]), + (1720053293.330528, [], [1, 2, 3, 4, 5, 6]), + (1720084358.281667, [], [1, 2, 3, 4, 5, 6]), + (1720084522.554824, [], [1, 2, 3, 4, 5, 6]), + (1720084524.658959, [], [1, 2, 3, 4, 5, 6]), + (1720153146.842639, [], [1, 2, 3, 4, 5, 6]), + (1720153159.620793, [], [1, 2, 3, 4, 5, 6]), + (1720223041.359927, [], [1, 2, 3, 4, 5, 6]), + (1720223107.178767, [], [1, 2, 3, 4, 5, 6]), + (1720223109.301943, [], [1, 2, 3, 4, 5, 6]), + (1720243608.145196, [], [1, 2, 3, 4, 5, 6]), + (1720243715.813915, [], [1, 2, 3, 4, 5, 6]), + (1720310055.295457, [], [1, 2, 3, 4, 5, 6]), + (1720310225.190394, [], [1, 2, 3, 4, 5, 6]), + (1720310227.250668, [], [1, 2, 3, 4, 5, 6]), + (1720374255.295948, [], [1, 2, 3, 4, 5, 6]), + (1720374257.390432, [], [1, 2, 3, 4, 5, 6]), + (1720397033.790744, [], [1, 2, 3, 4, 5, 6]), + (1720397192.93351, [], [1, 2, 3, 4, 5, 6]), + (1720489562.012912, [], [1, 2, 3, 4, 5, 6]), + (1720489620.124167, [], [1, 2, 3, 4, 5, 6]), + (1720489622.2461, [], [1, 2, 3, 4, 5, 6]), + (1720577615.944083, [], [1, 2, 3, 4, 5, 6]), + (1720595908.263871, [], [1, 2, 3, 4, 5, 6]), + (1720596022.795818, [], [1, 2, 3, 4, 5, 6]), + (1720596024.909409, [], [1, 2, 3, 4, 5, 6]), + (1720596025.112291, [], [1, 2, 3, 4, 5, 6]), + (1720596027.181848, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720410184.878056, [], [1, 2, 3, 4, 5, 6])], + [(1720446097.457008, [], [1, 2, 3, 4, 5, 6]), (1720582142.651169, [], [1, 2, 3, 4, 5, 6])], + [(1720496385.651227, [], [1, 2, 3, 4, 5, 6]), (1720588018.159792, [], [1, 2, 3, 4, 5, 6])], + [(1719975737.968693, [], [1, 2, 3, 4, 5, 6]), (1720069758.403541, [], [1, 2, 3, 4, 5, 6])], + [(1720573220.196789, [], [1, 2, 3, 4, 5, 6])], + [(1720435170.4772, [], [1, 2, 3, 4, 5, 6])], + [(1720156838.062846, [], [1, 2, 3, 4, 5, 6]), (1720588244.606338, [], [1, 2, 3, 4, 5, 6])], + [(1720072706.921977, [], [1, 2, 3, 4, 5, 6]), (1720588899.722119, [], [1, 2, 3, 4, 5, 6])], + [(1720058343.607628, [], [1, 2, 3, 4, 5, 6])], + [(1720314365.315505, [], [1, 2, 3, 4, 5, 6])], + [(1720502417.751936, [], [1, 2, 3, 4, 5, 6])], + [(1720302708.367359, [], [1, 2, 3, 4, 5, 6]), (1720585704.559633, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984540.656777, [], [1, 2, 3, 4, 5, 6]), + (1720146973.8651, [], [1, 2, 3, 4, 5, 6]), + (1720341798.159409, [], [1, 2, 3, 4, 5, 6]), + (1720482149.273983, [], [1, 2, 3, 4, 5, 6]), + (1720570969.604085, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720311167.154928, [], [1, 2, 3, 4, 5, 6]), (1720404884.472902, [], [1, 2, 3, 4, 5, 6])], + [(1720481370.176729, [], [1, 2, 3, 4, 5, 6])], + [(1720098969.778426, [], [1, 2, 3, 4, 5, 6]), (1720542708.023885, [], [1, 2, 3, 4, 5, 6])], + [ + (1720149583.597081, [], [1, 2, 3, 4, 5, 6]), + (1720314574.406545, [], [1, 2, 3, 4, 5, 6]), + (1720416038.659142, [], [1, 2, 3, 4, 5, 6]), + (1720572347.697131, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720033450.660601, [], [1, 2, 3, 4, 5, 6]), + (1720033450.752555, [], [1, 2, 3, 4, 5, 6]), + (1720033455.770905, [], [1, 2, 3, 4, 5, 6]), + (1720400227.89913, [], [1, 2, 3, 4, 5, 6]), + (1720400250.299243, [], [1, 2, 3, 4, 5, 6]), + (1720400252.394995, [], [1, 2, 3, 4, 5, 6]), + (1720417432.186774, [], [1, 2, 3, 4, 5, 6]), + (1720417504.33498, [], [1, 2, 3, 4, 5, 6]), + (1720417506.39095, [], [1, 2, 3, 4, 5, 6]), + (1720417509.515927, [], [1, 2, 3, 4, 5, 6]), + (1720417511.647347, [], [1, 2, 3, 4, 5, 6]), + (1720417512.311827, [], [1, 2, 3, 4, 5, 6]), + (1720512787.544525, [], [1, 2, 3, 4, 5, 6]), + 
(1720512787.637452, [], [1, 2, 3, 4, 5, 6]), + (1720512790.509437, [], [1, 2, 3, 4, 5, 6]), + (1720546384.085434, [], [1, 2, 3, 4, 5, 6]), + (1720546402.635026, [], [1, 2, 3, 4, 5, 6]), + (1720546404.721606, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720091612.414745, [], [1, 2, 3, 4, 5, 6]), (1720394180.692823, [], [1, 2, 3, 4, 5, 6])], + [(1720499565.692616, [], [1, 2, 3, 4, 5, 6])], + [ + (1720050050.500015, [], [1, 2, 3, 4, 5, 6]), + (1720050118.092842, [], [1, 2, 3, 4, 5, 6]), + (1720050225.108458, [], [1, 2, 3, 4, 5, 6]), + (1720065727.65261, [], [1, 2, 3, 4, 5, 6]), + (1720065830.735126, [], [1, 2, 3, 4, 5, 6]), + (1720065888.617825, [], [1, 2, 3, 4, 5, 6]), + (1720065986.346168, [], [1, 2, 3, 4, 5, 6]), + (1720066099.675624, [], [1, 2, 3, 4, 5, 6]), + (1720066274.481661, [], [1, 2, 3, 4, 5, 6]), + (1720066349.195281, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720151340.048882, [], [1, 2, 3, 4, 5, 6]), + (1720314849.375028, [], [1, 2, 3, 4, 5, 6]), + (1720413869.9313, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720054419.118301, [], [1, 2, 3, 4, 5, 6]), (1720399142.008777, [], [1, 2, 3, 4, 5, 6])], + [ + (1720070403.925696, [], [1, 2, 3, 4, 5, 6]), + (1720244978.0606, [], [1, 2, 3, 4, 5, 6]), + (1720588083.280232, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720142944.669634, [], [1, 2, 3, 4, 5, 6]), + (1720317829.682224, [], [1, 2, 3, 4, 5, 6]), + (1720402172.873968, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720135813.27587, [], [1, 2, 3, 4, 5, 6]), + (1720344219.879026, [], [1, 2, 3, 4, 5, 6]), + (1720587780.127476, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720307062.497702, [], [1, 2, 3, 4, 5, 6])], + [(1720145491.250495, [], [1, 2, 3, 4, 5, 6]), (1720586242.178403, [], [1, 2, 3, 4, 5, 6])], + [(1720586566.015615, [], [1, 2, 3, 4, 5, 6])], + [(1720409077.369118, [], [1, 2, 3, 4, 5, 6]), (1720569722.833936, [], [1, 2, 3, 4, 5, 6])], + [(1720098442.029928, [], [1, 2, 3, 4, 5, 6]), (1720391796.053942, [], [1, 2, 3, 4, 5, 6])], + [(1720243240.863801, [], [1, 2, 3, 4, 5, 6]), (1720492060.909226, [], [1, 2, 3, 4, 5, 6])], + [ + (1719967064.859664, [], [1, 2, 3, 4, 5, 6]), + (1720055971.205432, [], [1, 2, 3, 4, 5, 6]), + (1720056017.075129, [], [1, 2, 3, 4, 5, 6]), + (1720069354.406111, [], [1, 2, 3, 4, 5, 6]), + (1720069356.466395, [], [1, 2, 3, 4, 5, 6]), + (1720153990.829373, [], [1, 2, 3, 4, 5, 6]), + (1720154034.130574, [], [1, 2, 3, 4, 5, 6]), + (1720232518.365492, [], [1, 2, 3, 4, 5, 6]), + (1720232668.084829, [], [1, 2, 3, 4, 5, 6]), + (1720232670.147567, [], [1, 2, 3, 4, 5, 6]), + (1720247796.598987, [], [1, 2, 3, 4, 5, 6]), + (1720310742.132713, [], [1, 2, 3, 4, 5, 6]), + (1720310784.012123, [], [1, 2, 3, 4, 5, 6]), + (1720320091.334971, [], [1, 2, 3, 4, 5, 6]), + (1720357505.367765, [], [1, 2, 3, 4, 5, 6]), + (1720357507.406388, [], [1, 2, 3, 4, 5, 6]), + (1720404625.988586, [], [1, 2, 3, 4, 5, 6]), + (1720404726.50447, [], [1, 2, 3, 4, 5, 6]), + (1720404728.609789, [], [1, 2, 3, 4, 5, 6]), + (1720417440.696768, [], [1, 2, 3, 4, 5, 6]), + (1720460381.831877, [], [1, 2, 3, 4, 5, 6]), + (1720460489.831088, [], [1, 2, 3, 4, 5, 6]), + (1720492881.459734, [], [1, 2, 3, 4, 5, 6]), + (1720492883.570789, [], [1, 2, 3, 4, 5, 6]), + (1720580680.591028, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719975556.382414, [], [1, 2, 3, 4, 5, 6]), (1720231475.932226, [], [1, 2, 3, 4, 5, 6])], + [(1720569569.754826, [], [1, 2, 3, 4, 5, 6])], + [(1720043952.413223, [], [1, 2, 3, 4, 5, 6]), (1720225500.222696, [], [1, 2, 3, 4, 5, 6])], + [ + (1719967819.052883, [], [1, 2, 3, 4, 5, 6]), + (1720234292.697748, [], [1, 2, 3, 4, 5, 6]), + 
(1720397113.348799, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720087477.672876, [], [1, 2, 3, 4, 5, 6]), (1720438489.760021, [], [1, 2, 3, 4, 5, 6])], + [(1720577383.739689, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047896.111507, [], [1, 2, 3, 4, 5, 6]), + (1720200244.93862, [], [1, 2, 3, 4, 5, 6]), + (1720464543.942733, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719970307.394382, [], [1, 2, 3, 4, 5, 6])], + [(1719982131.954439, [], [1, 2, 3, 4, 5, 6]), (1720238111.874625, [], [1, 2, 3, 4, 5, 6])], + [(1719966189.201726, [], [1, 2, 3, 4, 5, 6]), (1720150700.452652, [], [1, 2, 3, 4, 5, 6])], + [(1720142072.057736, [], [1, 2, 3, 4, 5, 6])], + [(1720063956.632264, [], [1, 2, 3, 4, 5, 6])], + [(1720057020.243682, [], [1, 2, 3, 4, 5, 6])], + [ + (1719973656.343743, [], [1, 2, 3, 4, 5, 6]), + (1719973767.615562, [], [1, 2, 3, 4, 5, 6]), + (1719973769.702368, [], [1, 2, 3, 4, 5, 6]), + (1720050422.073716, [], [1, 2, 3, 4, 5, 6]), + (1720060932.515015, [], [1, 2, 3, 4, 5, 6]), + (1720061076.268193, [], [1, 2, 3, 4, 5, 6]), + (1720061078.375626, [], [1, 2, 3, 4, 5, 6]), + (1720061082.695369, [], [1, 2, 3, 4, 5, 6]), + (1720061084.803345, [], [1, 2, 3, 4, 5, 6]), + (1720061088.695247, [], [1, 2, 3, 4, 5, 6]), + (1720061090.795592, [], [1, 2, 3, 4, 5, 6]), + (1720135057.814031, [], [1, 2, 3, 4, 5, 6]), + (1720135295.655111, [], [1, 2, 3, 4, 5, 6]), + (1720135297.734383, [], [1, 2, 3, 4, 5, 6]), + (1720156608.706907, [], [1, 2, 3, 4, 5, 6]), + (1720156610.771323, [], [1, 2, 3, 4, 5, 6]), + (1720208160.885537, [], [1, 2, 3, 4, 5, 6]), + (1720208274.821579, [], [1, 2, 3, 4, 5, 6]), + (1720208276.929569, [], [1, 2, 3, 4, 5, 6]), + (1720223270.224257, [], [1, 2, 3, 4, 5, 6]), + (1720223272.316827, [], [1, 2, 3, 4, 5, 6]), + (1720223275.15326, [], [1, 2, 3, 4, 5, 6]), + (1720261350.082829, [], [1, 2, 3, 4, 5, 6]), + (1720261423.329391, [], [1, 2, 3, 4, 5, 6]), + (1720261425.427693, [], [1, 2, 3, 4, 5, 6]), + (1720319448.712298, [], [1, 2, 3, 4, 5, 6]), + (1720319512.283877, [], [1, 2, 3, 4, 5, 6]), + (1720319514.384024, [], [1, 2, 3, 4, 5, 6]), + (1720397163.860459, [], [1, 2, 3, 4, 5, 6]), + (1720397201.824506, [], [1, 2, 3, 4, 5, 6]), + (1720397203.898302, [], [1, 2, 3, 4, 5, 6]), + (1720487465.098454, [], [1, 2, 3, 4, 5, 6]), + (1720487616.241062, [], [1, 2, 3, 4, 5, 6]), + (1720576783.799559, [], [1, 2, 3, 4, 5, 6]), + (1720576837.625767, [], [1, 2, 3, 4, 5, 6]), + (1720576839.747181, [], [1, 2, 3, 4, 5, 6]), + (1720621584.709553, [], [1, 2, 3, 4, 5, 6]), + (1720621686.14789, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720210871.32264, [], [1, 2, 3, 4, 5, 6]), (1720324564.785043, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976074.774068, [], [1, 2, 3, 4, 5, 6]), + (1720432422.057214, [], [1, 2, 3, 4, 5, 6]), + (1720498059.505822, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720537307.322626, [], [1, 2, 3, 4, 5, 6])], + [(1720051279.943281, [], [1, 2, 3, 4, 5, 6]), (1720326635.291429, [], [1, 2, 3, 4, 5, 6])], + [ + (1720317156.788498, [], [1, 2, 3, 4, 5, 6]), + (1720409687.362687, [], [1, 2, 3, 4, 5, 6]), + (1720499324.086042, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720058380.956986, [], [1, 2, 3, 4, 5, 6]), (1720150021.258812, [], [1, 2, 3, 4, 5, 6])], + [ + (1720053489.906165, [], [1, 2, 3, 4, 5, 6]), + (1720139673.741326, [], [1, 2, 3, 4, 5, 6]), + (1720257769.454421, [], [1, 2, 3, 4, 5, 6]), + (1720393246.483443, [], [1, 2, 3, 4, 5, 6]), + (1720653126.121555, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719980029.84453, [], [1, 2, 3, 4, 5, 6])], + [(1720579706.660021, [], [1, 2, 3, 4, 5, 6])], + [(1720044200.360238, [], [1, 2, 3, 4, 5, 6])], + 
[(1720081853.12837, [], [1, 2, 3, 4, 5, 6])], + [(1720052881.805602, [], [1, 2, 3, 4, 5, 6])], + [ + (1720072654.6007, [], [1, 2, 3, 4, 5, 6]), + (1720238922.414211, [], [1, 2, 3, 4, 5, 6]), + (1720410048.118631, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720307869.769519, [], [1, 2, 3, 4, 5, 6])], + [ + (1720016591.216479, [], [1, 2, 3, 4, 5, 6]), + (1720157097.134758, [], [1, 2, 3, 4, 5, 6]), + (1720238731.063819, [], [1, 2, 3, 4, 5, 6]), + (1720575486.387284, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965711.424761, [], [1, 2, 3, 4, 5, 6]), + (1719965713.531779, [], [1, 2, 3, 4, 5, 6]), + (1720049361.440421, [], [1, 2, 3, 4, 5, 6]), + (1720049363.495467, [], [1, 2, 3, 4, 5, 6]), + (1720070584.34326, [], [1, 2, 3, 4, 5, 6]), + (1720070586.439897, [], [1, 2, 3, 4, 5, 6]), + (1720070588.168108, [], [1, 2, 3, 4, 5, 6]), + (1720154076.359366, [], [1, 2, 3, 4, 5, 6]), + (1720154271.555582, [], [1, 2, 3, 4, 5, 6]), + (1720221139.621509, [], [1, 2, 3, 4, 5, 6]), + (1720221264.378154, [], [1, 2, 3, 4, 5, 6]), + (1720221266.479342, [], [1, 2, 3, 4, 5, 6]), + (1720239415.452192, [], [1, 2, 3, 4, 5, 6]), + (1720239475.014596, [], [1, 2, 3, 4, 5, 6]), + (1720239477.07742, [], [1, 2, 3, 4, 5, 6]), + (1720313035.217622, [], [1, 2, 3, 4, 5, 6]), + (1720313041.373706, [], [1, 2, 3, 4, 5, 6]), + (1720313043.420222, [], [1, 2, 3, 4, 5, 6]), + (1720406631.8599, [], [1, 2, 3, 4, 5, 6]), + (1720406659.023715, [], [1, 2, 3, 4, 5, 6]), + (1720484615.165994, [], [1, 2, 3, 4, 5, 6]), + (1720484638.913162, [], [1, 2, 3, 4, 5, 6]), + (1720497880.450011, [], [1, 2, 3, 4, 5, 6]), + (1720497934.842426, [], [1, 2, 3, 4, 5, 6]), + (1720497936.912581, [], [1, 2, 3, 4, 5, 6]), + (1720540604.563371, [], [1, 2, 3, 4, 5, 6]), + (1720540779.42356, [], [1, 2, 3, 4, 5, 6]), + (1720540781.553641, [], [1, 2, 3, 4, 5, 6]), + (1720570083.468668, [], [1, 2, 3, 4, 5, 6]), + (1720570174.103962, [], [1, 2, 3, 4, 5, 6]), + (1720570176.16906, [], [1, 2, 3, 4, 5, 6]), + (1720583667.401678, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719984388.470208, [], [1, 2, 3, 4, 5, 6]), + (1720057777.644161, [], [1, 2, 3, 4, 5, 6]), + (1720057953.012529, [], [1, 2, 3, 4, 5, 6]), + (1720057955.119335, [], [1, 2, 3, 4, 5, 6]), + (1720148795.685708, [], [1, 2, 3, 4, 5, 6]), + (1720148805.802813, [], [1, 2, 3, 4, 5, 6]), + (1720148807.834184, [], [1, 2, 3, 4, 5, 6]), + (1720234649.035149, [], [1, 2, 3, 4, 5, 6]), + (1720234705.911474, [], [1, 2, 3, 4, 5, 6]), + (1720321956.32096, [], [1, 2, 3, 4, 5, 6]), + (1720321958.41442, [], [1, 2, 3, 4, 5, 6]), + (1720409662.220157, [], [1, 2, 3, 4, 5, 6]), + (1720409664.333692, [], [1, 2, 3, 4, 5, 6]), + (1720447795.315077, [], [1, 2, 3, 4, 5, 6]), + (1720447797.391813, [], [1, 2, 3, 4, 5, 6]), + (1720483016.135213, [], [1, 2, 3, 4, 5, 6]), + (1720483018.21033, [], [1, 2, 3, 4, 5, 6]), + (1720483019.948558, [], [1, 2, 3, 4, 5, 6]), + (1720573042.040836, [], [1, 2, 3, 4, 5, 6]), + (1720573166.473551, [], [1, 2, 3, 4, 5, 6]), + (1720624631.359534, [], [1, 2, 3, 4, 5, 6]), + (1720624673.478312, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720072684.014623, [], [1, 2, 3, 4, 5, 6]), + (1720308176.896274, [], [1, 2, 3, 4, 5, 6]), + (1720404626.250723, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719970599.437881, [], [1, 2, 3, 4, 5, 6]), + (1720066959.831691, [], [1, 2, 3, 4, 5, 6]), + (1720067155.392921, [], [1, 2, 3, 4, 5, 6]), + (1720067157.488109, [], [1, 2, 3, 4, 5, 6]), + (1720155932.952678, [], [1, 2, 3, 4, 5, 6]), + (1720156052.447154, [], [1, 2, 3, 4, 5, 6]), + (1720156054.565087, [], [1, 2, 3, 4, 5, 6]), + (1720176991.157569, [], [1, 2, 3, 
4, 5, 6]), + (1720177007.156433, [], [1, 2, 3, 4, 5, 6]), + (1720197632.204363, [], [1, 2, 3, 4, 5, 6]), + (1720197634.246364, [], [1, 2, 3, 4, 5, 6]), + (1720245139.96838, [], [1, 2, 3, 4, 5, 6]), + (1720245142.060531, [], [1, 2, 3, 4, 5, 6]), + (1720313085.938317, [], [1, 2, 3, 4, 5, 6]), + (1720313087.991269, [], [1, 2, 3, 4, 5, 6]), + (1720382887.239454, [], [1, 2, 3, 4, 5, 6]), + (1720383102.784337, [], [1, 2, 3, 4, 5, 6]), + (1720383104.858248, [], [1, 2, 3, 4, 5, 6]), + (1720401645.882852, [], [1, 2, 3, 4, 5, 6]), + (1720401875.751914, [], [1, 2, 3, 4, 5, 6]), + (1720401877.871081, [], [1, 2, 3, 4, 5, 6]), + (1720401879.349072, [], [1, 2, 3, 4, 5, 6]), + (1720401881.439362, [], [1, 2, 3, 4, 5, 6]), + (1720414595.608826, [], [1, 2, 3, 4, 5, 6]), + (1720414633.289105, [], [1, 2, 3, 4, 5, 6]), + (1720414635.359202, [], [1, 2, 3, 4, 5, 6]), + (1720417003.580682, [], [1, 2, 3, 4, 5, 6]), + (1720417074.130853, [], [1, 2, 3, 4, 5, 6]), + (1720417076.197409, [], [1, 2, 3, 4, 5, 6]), + (1720480715.553431, [], [1, 2, 3, 4, 5, 6]), + (1720480828.705337, [], [1, 2, 3, 4, 5, 6]), + (1720480830.783164, [], [1, 2, 3, 4, 5, 6]), + (1720579482.271054, [], [1, 2, 3, 4, 5, 6]), + (1720579503.249382, [], [1, 2, 3, 4, 5, 6]), + (1720579505.309044, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720320578.9281, [], [1, 2, 3, 4, 5, 6]), + (1720320637.501704, [], [1, 2, 3, 4, 5, 6]), + (1720320671.3461, [], [1, 2, 3, 4, 5, 6]), + (1720320700.900689, [], [1, 2, 3, 4, 5, 6]), + (1720326925.640353, [], [1, 2, 3, 4, 5, 6]), + (1720326927.740401, [], [1, 2, 3, 4, 5, 6]), + (1720397880.433215, [], [1, 2, 3, 4, 5, 6]), + (1720397991.949085, [], [1, 2, 3, 4, 5, 6]), + (1720407395.883774, [], [1, 2, 3, 4, 5, 6]), + (1720478571.564518, [], [1, 2, 3, 4, 5, 6]), + (1720478573.689929, [], [1, 2, 3, 4, 5, 6]), + (1720496055.710657, [], [1, 2, 3, 4, 5, 6]), + (1720496250.423433, [], [1, 2, 3, 4, 5, 6]), + (1720496252.533919, [], [1, 2, 3, 4, 5, 6]), + (1720567595.861171, [], [1, 2, 3, 4, 5, 6]), + (1720567634.8402, [], [1, 2, 3, 4, 5, 6]), + (1720567636.90682, [], [1, 2, 3, 4, 5, 6]), + (1720652785.237133, [], [1, 2, 3, 4, 5, 6]), + (1720652836.758383, [], [1, 2, 3, 4, 5, 6]), + (1720652838.851539, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720076323.446198, [], [1, 2, 3, 4, 5, 6]), + (1720076387.202961, [], [1, 2, 3, 4, 5, 6]), + (1720076463.503404, [], [1, 2, 3, 4, 5, 6]), + (1720235511.111341, [], [1, 2, 3, 4, 5, 6]), + (1720235587.152102, [], [1, 2, 3, 4, 5, 6]), + (1720235604.104726, [], [1, 2, 3, 4, 5, 6]), + (1720235788.441489, [], [1, 2, 3, 4, 5, 6]), + (1720235897.829327, [], [1, 2, 3, 4, 5, 6]), + (1720235943.361057, [], [1, 2, 3, 4, 5, 6]), + (1720236088.122922, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719969865.146509, [], [1, 2, 3, 4, 5, 6])], + [ + (1720088372.900286, [], [1, 2, 3, 4, 5, 6]), + (1720220494.799398, [], [1, 2, 3, 4, 5, 6]), + (1720488909.409034, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720063420.61895, [], [1, 2, 3, 4, 5, 6]), + (1720320803.577679, [], [1, 2, 3, 4, 5, 6]), + (1720401999.385093, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720051291.94966, [], [1, 2, 3, 4, 5, 6]), (1720313692.587964, [], [1, 2, 3, 4, 5, 6])], + [(1720509709.170957, [], [1, 2, 3, 4, 5, 6]), (1720652928.475583, [], [1, 2, 3, 4, 5, 6])], + [(1719976500.586248, [], [1, 2, 3, 4, 5, 6])], + [(1720063184.061031, [], [1, 2, 3, 4, 5, 6])], + [(1720147998.634564, [], [1, 2, 3, 4, 5, 6]), (1720575037.093899, [], [1, 2, 3, 4, 5, 6])], + [(1720594897.858543, [], [1, 2, 3, 4, 5, 6])], + [ + (1720238660.290085, [], [1, 2, 3, 4, 5, 6]), + (1720306835.46462, 
[], [1, 2, 3, 4, 5, 6]), + (1720401110.356341, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719970976.422012, [], [1, 2, 3, 4, 5, 6]), + (1720051629.137902, [], [1, 2, 3, 4, 5, 6]), + (1720301759.327348, [], [1, 2, 3, 4, 5, 6]), + (1720646663.705407, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720077214.628033, [], [1, 2, 3, 4, 5, 6]), (1720579842.451312, [], [1, 2, 3, 4, 5, 6])], + [(1720403179.578891, [], [1, 2, 3, 4, 5, 6]), (1720573175.772465, [], [1, 2, 3, 4, 5, 6])], + [ + (1720418161.36421, [], [1, 2, 3, 4, 5, 6]), + (1720418260.71249, [], [1, 2, 3, 4, 5, 6]), + (1720418315.726571, [], [1, 2, 3, 4, 5, 6]), + (1720418380.031953, [], [1, 2, 3, 4, 5, 6]), + (1720491482.634884, [], [1, 2, 3, 4, 5, 6]), + (1720491528.332034, [], [1, 2, 3, 4, 5, 6]), + (1720491530.434212, [], [1, 2, 3, 4, 5, 6]), + (1720573999.084897, [], [1, 2, 3, 4, 5, 6]), + (1720574047.543743, [], [1, 2, 3, 4, 5, 6]), + (1720574049.630747, [], [1, 2, 3, 4, 5, 6]), + (1720616534.181229, [], [1, 2, 3, 4, 5, 6]), + (1720616536.227681, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719971505.975792, [], [1, 2, 3, 4, 5, 6]), + (1720309386.016213, [], [1, 2, 3, 4, 5, 6]), + (1720570539.167487, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720026981.201845, [], [1, 2, 3, 4, 5, 6]), + (1720063393.228975, [], [1, 2, 3, 4, 5, 6]), + (1720569870.489752, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720151334.562097, [], [1, 2, 3, 4, 5, 6]), + (1720311454.894847, [], [1, 2, 3, 4, 5, 6]), + (1720483363.072169, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720051176.858561, [], [1, 2, 3, 4, 5, 6]), (1720567049.0934, [], [1, 2, 3, 4, 5, 6])], + [ + (1720300278.98565, [], [1, 2, 3, 4, 5, 6]), + (1720397622.151994, [], [1, 2, 3, 4, 5, 6]), + (1720487075.583534, [], [1, 2, 3, 4, 5, 6]), + (1720572927.092976, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720588311.663694, [], [1, 2, 3, 4, 5, 6])], + [(1720323338.447054, [], [1, 2, 3, 4, 5, 6])], + [(1720499740.21156, [], [1, 2, 3, 4, 5, 6])], + [(1720239215.924016, [], [1, 2, 3, 4, 5, 6]), (1720407168.197067, [], [1, 2, 3, 4, 5, 6])], + [(1720496803.3956, [], [1, 2, 3, 4, 5, 6])], + [(1720578053.220463, [], [1, 2, 3, 4, 5, 6])], + [(1720400566.962842, [], [1, 2, 3, 4, 5, 6]), (1720652817.676145, [], [1, 2, 3, 4, 5, 6])], + [ + (1720234566.739671, [], [1, 2, 3, 4, 5, 6]), + (1720335553.524142, [], [1, 2, 3, 4, 5, 6]), + (1720576366.993741, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720058095.694475, [], [1, 2, 3, 4, 5, 6])], + [(1720499846.305083, [], [1, 2, 3, 4, 5, 6])], + [(1720069736.856188, [], [1, 2, 3, 4, 5, 6]), (1720327054.018462, [], [1, 2, 3, 4, 5, 6])], + [(1720446989.50202, [], [1, 2, 3, 4, 5, 6]), (1720579246.321269, [], [1, 2, 3, 4, 5, 6])], + [ + (1720065515.046196, [], [1, 2, 3, 4, 5, 6]), + (1720237193.252454, [], [1, 2, 3, 4, 5, 6]), + (1720402549.014306, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965737.195869, [], [1, 2, 3, 4, 5, 6]), + (1720057334.427369, [], [1, 2, 3, 4, 5, 6]), + (1720221205.840325, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720366343.985963, [], [1, 2, 3, 4, 5, 6])], + [(1720195598.557753, [], [1, 2, 3, 4, 5, 6])], + [(1719980678.939359, [], [1, 2, 3, 4, 5, 6]), (1720144995.169791, [], [1, 2, 3, 4, 5, 6])], + [(1720583721.214132, [], [1, 2, 3, 4, 5, 6])], + [ + (1720054537.756175, [], [1, 2, 3, 4, 5, 6]), + (1720182546.976397, [], [1, 2, 3, 4, 5, 6]), + (1720415420.418491, [], [1, 2, 3, 4, 5, 6]), + (1720491014.558376, [], [1, 2, 3, 4, 5, 6]), + (1720568712.840731, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720061881.320656, [], [1, 2, 3, 4, 5, 6]), + (1720352102.705661, [], [1, 2, 3, 4, 5, 6]), + (1720405007.368123, [], [1, 2, 3, 
4, 5, 6]), + ], + [(1720578542.112522, [], [1, 2, 3, 4, 5, 6]), (1720652582.644175, [], [1, 2, 3, 4, 5, 6])], + [(1720243826.422296, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980177.888359, [], [1, 2, 3, 4, 5, 6]), + (1720151191.015847, [], [1, 2, 3, 4, 5, 6]), + (1720578744.147878, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720401711.559261, [], [1, 2, 3, 4, 5, 6])], + [ + (1720166076.219132, [], [1, 2, 3, 4, 5, 6]), + (1720603429.438791, [], [1, 2, 3, 4, 5, 6]), + (1720653029.910009, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060428.267185, [], [1, 2, 3, 4, 5, 6]), (1720330543.204866, [], [1, 2, 3, 4, 5, 6])], + [(1720023596.681592, [], [1, 2, 3, 4, 5, 6]), (1720492712.578171, [], [1, 2, 3, 4, 5, 6])], + [ + (1719969468.250895, [], [1, 2, 3, 4, 5, 6]), + (1719969673.12603, [], [1, 2, 3, 4, 5, 6]), + (1719984075.563213, [], [1, 2, 3, 4, 5, 6]), + (1719984158.542506, [], [1, 2, 3, 4, 5, 6]), + (1719984160.639094, [], [1, 2, 3, 4, 5, 6]), + (1719984161.840978, [], [1, 2, 3, 4, 5, 6]), + (1720063496.706949, [], [1, 2, 3, 4, 5, 6]), + (1720063498.73801, [], [1, 2, 3, 4, 5, 6]), + (1720071811.706281, [], [1, 2, 3, 4, 5, 6]), + (1720071892.339384, [], [1, 2, 3, 4, 5, 6]), + (1720071894.401443, [], [1, 2, 3, 4, 5, 6]), + (1720141092.670014, [], [1, 2, 3, 4, 5, 6]), + (1720141281.129314, [], [1, 2, 3, 4, 5, 6]), + (1720141283.221705, [], [1, 2, 3, 4, 5, 6]), + (1720242813.561025, [], [1, 2, 3, 4, 5, 6]), + (1720242887.120065, [], [1, 2, 3, 4, 5, 6]), + (1720242889.197122, [], [1, 2, 3, 4, 5, 6]), + (1720314744.527265, [], [1, 2, 3, 4, 5, 6]), + (1720314759.642908, [], [1, 2, 3, 4, 5, 6]), + (1720314761.711826, [], [1, 2, 3, 4, 5, 6]), + (1720371097.307425, [], [1, 2, 3, 4, 5, 6]), + (1720371255.548011, [], [1, 2, 3, 4, 5, 6]), + (1720387058.372995, [], [1, 2, 3, 4, 5, 6]), + (1720387060.502073, [], [1, 2, 3, 4, 5, 6]), + (1720404975.528018, [], [1, 2, 3, 4, 5, 6]), + (1720405017.654969, [], [1, 2, 3, 4, 5, 6]), + (1720405019.759385, [], [1, 2, 3, 4, 5, 6]), + (1720415081.995346, [], [1, 2, 3, 4, 5, 6]), + (1720415260.662438, [], [1, 2, 3, 4, 5, 6]), + (1720415262.742795, [], [1, 2, 3, 4, 5, 6]), + (1720485117.023333, [], [1, 2, 3, 4, 5, 6]), + (1720485119.095263, [], [1, 2, 3, 4, 5, 6]), + (1720499098.798714, [], [1, 2, 3, 4, 5, 6]), + (1720499235.930954, [], [1, 2, 3, 4, 5, 6]), + (1720499238.042676, [], [1, 2, 3, 4, 5, 6]), + (1720569739.038396, [], [1, 2, 3, 4, 5, 6]), + (1720569853.204944, [], [1, 2, 3, 4, 5, 6]), + (1720569855.2363, [], [1, 2, 3, 4, 5, 6]), + (1720602936.713875, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720242969.422529, [], [1, 2, 3, 4, 5, 6])], + [ + (1719964838.834772, [], [1, 2, 3, 4, 5, 6]), + (1720091821.44426, [], [1, 2, 3, 4, 5, 6]), + (1720233192.310563, [], [1, 2, 3, 4, 5, 6]), + (1720328141.79034, [], [1, 2, 3, 4, 5, 6]), + (1720496224.014897, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720152285.903631, [], [1, 2, 3, 4, 5, 6]), + (1720243334.372125, [], [1, 2, 3, 4, 5, 6]), + (1720379463.678962, [], [1, 2, 3, 4, 5, 6]), + (1720542353.379097, [], [1, 2, 3, 4, 5, 6]), + (1720649368.688756, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720226523.433881, [], [1, 2, 3, 4, 5, 6]), + (1720226623.918185, [], [1, 2, 3, 4, 5, 6]), + (1720226651.194519, [], [1, 2, 3, 4, 5, 6]), + (1720226742.683406, [], [1, 2, 3, 4, 5, 6]), + (1720330187.550014, [], [1, 2, 3, 4, 5, 6]), + (1720330210.231169, [], [1, 2, 3, 4, 5, 6]), + (1720410272.539479, [], [1, 2, 3, 4, 5, 6]), + (1720410274.655647, [], [1, 2, 3, 4, 5, 6]), + (1720480303.2625, [], [1, 2, 3, 4, 5, 6]), + (1720480415.671856, [], [1, 2, 3, 4, 5, 6]), + 
(1720480417.738288, [], [1, 2, 3, 4, 5, 6]), + (1720504693.498524, [], [1, 2, 3, 4, 5, 6]), + (1720504764.21831, [], [1, 2, 3, 4, 5, 6]), + (1720504766.268173, [], [1, 2, 3, 4, 5, 6]), + (1720568377.567722, [], [1, 2, 3, 4, 5, 6]), + (1720568452.706691, [], [1, 2, 3, 4, 5, 6]), + (1720568454.778127, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720150751.139246, [], [1, 2, 3, 4, 5, 6])], + [ + (1720064945.077586, [], [1, 2, 3, 4, 5, 6]), + (1720176851.77124, [], [1, 2, 3, 4, 5, 6]), + (1720413751.53369, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720067667.982332, [], [1, 2, 3, 4, 5, 6]), + (1720498323.491767, [], [1, 2, 3, 4, 5, 6]), + (1720640332.912224, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720586416.962046, [], [1, 2, 3, 4, 5, 6])], + [(1720403065.106609, [], [1, 2, 3, 4, 5, 6]), (1720468529.097883, [], [1, 2, 3, 4, 5, 6])], + [(1719976409.626599, [], [1, 2, 3, 4, 5, 6]), (1720617974.74258, [], [1, 2, 3, 4, 5, 6])], + [(1720155789.338418, [], [1, 2, 3, 4, 5, 6])], + [(1719965523.519862, [], [1, 2, 3, 4, 5, 6])], + [(1720492317.02938, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971602.527103, [], [1, 2, 3, 4, 5, 6]), + (1720069918.036547, [], [1, 2, 3, 4, 5, 6]), + (1720149900.77775, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720143447.493022, [], [1, 2, 3, 4, 5, 6]), (1720292005.708479, [], [1, 2, 3, 4, 5, 6])], + [(1720316731.010367, [], [1, 2, 3, 4, 5, 6])], + [(1720061643.180585, [], [1, 2, 3, 4, 5, 6])], + [ + (1719998587.453659, [], [1, 2, 3, 4, 5, 6]), + (1720141642.077196, [], [1, 2, 3, 4, 5, 6]), + (1720308402.56405, [], [1, 2, 3, 4, 5, 6]), + (1720416584.28358, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720587211.681421, [], [1, 2, 3, 4, 5, 6])], + [(1720155992.271335, [], [1, 2, 3, 4, 5, 6]), (1720572458.818891, [], [1, 2, 3, 4, 5, 6])], + [(1720052898.053761, [], [1, 2, 3, 4, 5, 6])], + [(1720221610.587492, [], [1, 2, 3, 4, 5, 6]), (1720312064.403238, [], [1, 2, 3, 4, 5, 6])], + [ + (1720147178.948005, [], [1, 2, 3, 4, 5, 6]), + (1720315892.264762, [], [1, 2, 3, 4, 5, 6]), + (1720484335.142158, [], [1, 2, 3, 4, 5, 6]), + (1720625426.867126, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720225964.225383, [], [1, 2, 3, 4, 5, 6]), (1720486617.901554, [], [1, 2, 3, 4, 5, 6])], + [(1720064206.744859, [], [1, 2, 3, 4, 5, 6]), (1720148855.512919, [], [1, 2, 3, 4, 5, 6])], + [ + (1719978022.859036, [], [1, 2, 3, 4, 5, 6]), + (1720225065.814898, [], [1, 2, 3, 4, 5, 6]), + (1720379679.901663, [], [1, 2, 3, 4, 5, 6]), + (1720486481.106043, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720072107.565425, [], [1, 2, 3, 4, 5, 6]), (1720244247.598994, [], [1, 2, 3, 4, 5, 6])], + [(1720418305.8177, [], [1, 2, 3, 4, 5, 6])], + [(1719982059.871483, [], [1, 2, 3, 4, 5, 6])], + [(1719978817.113542, [], [1, 2, 3, 4, 5, 6])], + [(1720138229.977179, [], [1, 2, 3, 4, 5, 6])], + [(1720069967.627991, [], [1, 2, 3, 4, 5, 6]), (1720327850.533176, [], [1, 2, 3, 4, 5, 6])], + [(1720047008.96098, [], [1, 2, 3, 4, 5, 6]), (1720399885.70332, [], [1, 2, 3, 4, 5, 6])], + [(1720241326.183043, [], [1, 2, 3, 4, 5, 6])], + [ + (1719974240.573502, [], [1, 2, 3, 4, 5, 6]), + (1719974319.570753, [], [1, 2, 3, 4, 5, 6]), + (1719974321.645618, [], [1, 2, 3, 4, 5, 6]), + (1720023281.111928, [], [1, 2, 3, 4, 5, 6]), + (1720023484.341612, [], [1, 2, 3, 4, 5, 6]), + (1720023486.442918, [], [1, 2, 3, 4, 5, 6]), + (1720056803.840688, [], [1, 2, 3, 4, 5, 6]), + (1720056805.906524, [], [1, 2, 3, 4, 5, 6]), + (1720056810.106719, [], [1, 2, 3, 4, 5, 6]), + (1720056812.20004, [], [1, 2, 3, 4, 5, 6]), + (1720070833.346034, [], [1, 2, 3, 4, 5, 6]), + (1720070835.405627, [], [1, 2, 3, 4, 5, 6]), 
+ (1720070839.751918, [], [1, 2, 3, 4, 5, 6]), + (1720070841.8631, [], [1, 2, 3, 4, 5, 6]), + (1720143274.991396, [], [1, 2, 3, 4, 5, 6]), + (1720143497.358536, [], [1, 2, 3, 4, 5, 6]), + (1720205396.067954, [], [1, 2, 3, 4, 5, 6]), + (1720205567.621928, [], [1, 2, 3, 4, 5, 6]), + (1720235968.291387, [], [1, 2, 3, 4, 5, 6]), + (1720236089.012578, [], [1, 2, 3, 4, 5, 6]), + (1720236091.096483, [], [1, 2, 3, 4, 5, 6]), + (1720299309.484376, [], [1, 2, 3, 4, 5, 6]), + (1720299348.029909, [], [1, 2, 3, 4, 5, 6]), + (1720299350.111093, [], [1, 2, 3, 4, 5, 6]), + (1720320587.866423, [], [1, 2, 3, 4, 5, 6]), + (1720320589.944508, [], [1, 2, 3, 4, 5, 6]), + (1720400218.389368, [], [1, 2, 3, 4, 5, 6]), + (1720400220.487059, [], [1, 2, 3, 4, 5, 6]), + (1720492976.763916, [], [1, 2, 3, 4, 5, 6]), + (1720493019.503907, [], [1, 2, 3, 4, 5, 6]), + (1720493021.579652, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720122662.758187, [], [1, 2, 3, 4, 5, 6])], + [ + (1720005466.226878, [], [1, 2, 3, 4, 5, 6]), + (1720152200.991189, [], [1, 2, 3, 4, 5, 6]), + (1720481835.844194, [], [1, 2, 3, 4, 5, 6]), + (1720580183.828864, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720070662.89957, [], [1, 2, 3, 4, 5, 6])], + [(1720142183.650314, [], [1, 2, 3, 4, 5, 6]), (1720411252.551579, [], [1, 2, 3, 4, 5, 6])], + [(1720072507.675137, [], [1, 2, 3, 4, 5, 6]), (1720238913.204492, [], [1, 2, 3, 4, 5, 6])], + [(1720218011.114445, [], [1, 2, 3, 4, 5, 6])], + [(1720302385.101079, [], [1, 2, 3, 4, 5, 6])], + [(1720570186.762002, [], [1, 2, 3, 4, 5, 6])], + [(1720157661.668233, [], [1, 2, 3, 4, 5, 6])], + [ + (1720002096.475822, [], [1, 2, 3, 4, 5, 6]), + (1720002098.548489, [], [1, 2, 3, 4, 5, 6]), + (1720066947.715378, [], [1, 2, 3, 4, 5, 6]), + (1720066949.844651, [], [1, 2, 3, 4, 5, 6]), + (1720105199.7729, [], [1, 2, 3, 4, 5, 6]), + (1720105370.978068, [], [1, 2, 3, 4, 5, 6]), + (1720105373.072451, [], [1, 2, 3, 4, 5, 6]), + (1720134686.761223, [], [1, 2, 3, 4, 5, 6]), + (1720134688.87969, [], [1, 2, 3, 4, 5, 6]), + (1720154938.58341, [], [1, 2, 3, 4, 5, 6]), + (1720219977.007902, [], [1, 2, 3, 4, 5, 6]), + (1720219979.064108, [], [1, 2, 3, 4, 5, 6]), + (1720250673.20197, [], [1, 2, 3, 4, 5, 6]), + (1720250796.014257, [], [1, 2, 3, 4, 5, 6]), + (1720313894.440119, [], [1, 2, 3, 4, 5, 6]), + (1720313896.495204, [], [1, 2, 3, 4, 5, 6]), + (1720313896.774105, [], [1, 2, 3, 4, 5, 6]), + (1720396853.1643, [], [1, 2, 3, 4, 5, 6]), + (1720396855.257174, [], [1, 2, 3, 4, 5, 6]), + (1720489852.456446, [], [1, 2, 3, 4, 5, 6]), + (1720489986.86769, [], [1, 2, 3, 4, 5, 6]), + (1720569682.852233, [], [1, 2, 3, 4, 5, 6]), + (1720569767.225411, [], [1, 2, 3, 4, 5, 6]), + (1720569769.334261, [], [1, 2, 3, 4, 5, 6]), + (1720581192.763754, [], [1, 2, 3, 4, 5, 6]), + (1720581262.544992, [], [1, 2, 3, 4, 5, 6]), + (1720581264.629216, [], [1, 2, 3, 4, 5, 6]), + (1720581266.203535, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720226441.129883, [], [1, 2, 3, 4, 5, 6]), + (1720226519.948161, [], [1, 2, 3, 4, 5, 6]), + (1720226639.444269, [], [1, 2, 3, 4, 5, 6]), + (1720226731.198095, [], [1, 2, 3, 4, 5, 6]), + (1720226779.385516, [], [1, 2, 3, 4, 5, 6]), + (1720226788.674966, [], [1, 2, 3, 4, 5, 6]), + (1720226923.560385, [], [1, 2, 3, 4, 5, 6]), + (1720487951.436457, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720038002.665527, [], [1, 2, 3, 4, 5, 6]), + (1720488750.698306, [], [1, 2, 3, 4, 5, 6]), + (1720589885.270178, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720580260.501907, [], [1, 2, 3, 4, 5, 6])], + [(1720237662.32438, [], [1, 2, 3, 4, 5, 6]), (1720309542.106408, [], [1, 2, 
3, 4, 5, 6])], + [(1720573441.412558, [], [1, 2, 3, 4, 5, 6])], + [(1720598006.382998, [], [1, 2, 3, 4, 5, 6])], + [(1720249262.676111, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968867.096755, [], [1, 2, 3, 4, 5, 6]), + (1720406515.635103, [], [1, 2, 3, 4, 5, 6]), + (1720568473.896114, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720063183.618875, [], [1, 2, 3, 4, 5, 6])], + [(1720313109.473272, [], [1, 2, 3, 4, 5, 6]), (1720418083.174351, [], [1, 2, 3, 4, 5, 6])], + [(1720106024.609, [], [1, 2, 3, 4, 5, 6])], + [(1720058454.321955, [], [1, 2, 3, 4, 5, 6])], + [ + (1720062639.625591, [], [1, 2, 3, 4, 5, 6]), + (1720221670.858026, [], [1, 2, 3, 4, 5, 6]), + (1720496857.495022, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974856.395033, [], [1, 2, 3, 4, 5, 6]), + (1720053608.017225, [], [1, 2, 3, 4, 5, 6]), + (1720053669.477447, [], [1, 2, 3, 4, 5, 6]), + (1720053671.560518, [], [1, 2, 3, 4, 5, 6]), + (1720062649.578553, [], [1, 2, 3, 4, 5, 6]), + (1720062651.66265, [], [1, 2, 3, 4, 5, 6]), + (1720062653.377042, [], [1, 2, 3, 4, 5, 6]), + (1720062655.44862, [], [1, 2, 3, 4, 5, 6]), + (1720082692.43711, [], [1, 2, 3, 4, 5, 6]), + (1720082771.847834, [], [1, 2, 3, 4, 5, 6]), + (1720153671.808662, [], [1, 2, 3, 4, 5, 6]), + (1720153713.489374, [], [1, 2, 3, 4, 5, 6]), + (1720153715.599402, [], [1, 2, 3, 4, 5, 6]), + (1720153716.387598, [], [1, 2, 3, 4, 5, 6]), + (1720153718.446591, [], [1, 2, 3, 4, 5, 6]), + (1720239246.152588, [], [1, 2, 3, 4, 5, 6]), + (1720239248.241024, [], [1, 2, 3, 4, 5, 6]), + (1720239250.77294, [], [1, 2, 3, 4, 5, 6]), + (1720309956.683905, [], [1, 2, 3, 4, 5, 6]), + (1720310071.602061, [], [1, 2, 3, 4, 5, 6]), + (1720325462.049867, [], [1, 2, 3, 4, 5, 6]), + (1720325503.88631, [], [1, 2, 3, 4, 5, 6]), + (1720325506.004479, [], [1, 2, 3, 4, 5, 6]), + (1720412942.492135, [], [1, 2, 3, 4, 5, 6]), + (1720458829.36376, [], [1, 2, 3, 4, 5, 6]), + (1720458949.800013, [], [1, 2, 3, 4, 5, 6]), + (1720458951.887504, [], [1, 2, 3, 4, 5, 6]), + (1720492407.820081, [], [1, 2, 3, 4, 5, 6]), + (1720492444.404348, [], [1, 2, 3, 4, 5, 6]), + (1720492446.466946, [], [1, 2, 3, 4, 5, 6]), + (1720575932.543872, [], [1, 2, 3, 4, 5, 6]), + (1720576033.410802, [], [1, 2, 3, 4, 5, 6]), + (1720576035.469127, [], [1, 2, 3, 4, 5, 6]), + (1720576036.44253, [], [1, 2, 3, 4, 5, 6]), + (1720647287.059052, [], [1, 2, 3, 4, 5, 6]), + (1720647289.160943, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720330899.088291, [], [1, 2, 3, 4, 5, 6])], + [ + (1720081793.462679, [], [1, 2, 3, 4, 5, 6]), + (1720081976.131384, [], [1, 2, 3, 4, 5, 6]), + (1720081976.187009, [], [1, 2, 3, 4, 5, 6]), + (1720081980.325716, [], [1, 2, 3, 4, 5, 6]), + (1720091170.691618, [], [1, 2, 3, 4, 5, 6]), + (1720091356.840132, [], [1, 2, 3, 4, 5, 6]), + (1720091358.928927, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720238399.047753, [], [1, 2, 3, 4, 5, 6]), (1720567368.683366, [], [1, 2, 3, 4, 5, 6])], + [(1720583646.429286, [], [1, 2, 3, 4, 5, 6])], + [(1720330049.16818, [], [1, 2, 3, 4, 5, 6])], + [(1720578076.222297, [], [1, 2, 3, 4, 5, 6])], + [(1719972331.112716, [], [1, 2, 3, 4, 5, 6]), (1720051745.533132, [], [1, 2, 3, 4, 5, 6])], + [(1720146463.601388, [], [1, 2, 3, 4, 5, 6]), (1720474541.840768, [], [1, 2, 3, 4, 5, 6])], + [ + (1720051898.891617, [], [1, 2, 3, 4, 5, 6]), + (1720173013.609275, [], [1, 2, 3, 4, 5, 6]), + (1720320493.657042, [], [1, 2, 3, 4, 5, 6]), + (1720345690.851927, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720408818.140609, [], [1, 2, 3, 4, 5, 6]), (1720486964.900766, [], [1, 2, 3, 4, 5, 6])], + [ + (1720043728.363322, [], [1, 2, 3, 4, 5, 6]), + 
(1720311975.271982, [], [1, 2, 3, 4, 5, 6]), + (1720571578.431424, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719972681.535491, [], [1, 2, 3, 4, 5, 6]), (1720481018.937328, [], [1, 2, 3, 4, 5, 6])], + [ + (1720026382.553301, [], [1, 2, 3, 4, 5, 6]), + (1720149100.706808, [], [1, 2, 3, 4, 5, 6]), + (1720307942.507634, [], [1, 2, 3, 4, 5, 6]), + (1720570495.6023, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720488173.923373, [], [1, 2, 3, 4, 5, 6]), (1720568447.027354, [], [1, 2, 3, 4, 5, 6])], + [(1720567270.126647, [], [1, 2, 3, 4, 5, 6])], + [(1720499324.726509, [], [1, 2, 3, 4, 5, 6])], + [(1719967975.358552, [], [1, 2, 3, 4, 5, 6]), (1720326137.056104, [], [1, 2, 3, 4, 5, 6])], + [(1720328587.433829, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055336.001624, [], [1, 2, 3, 4, 5, 6]), + (1720138863.165013, [], [1, 2, 3, 4, 5, 6]), + (1720395924.519387, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720069032.516709, [], [1, 2, 3, 4, 5, 6])], + [(1720055010.518163, [], [1, 2, 3, 4, 5, 6]), (1720099339.150894, [], [1, 2, 3, 4, 5, 6])], + [(1720585549.317132, [], [1, 2, 3, 4, 5, 6])], + [ + (1720409221.504822, [], [1, 2, 3, 4, 5, 6]), + (1720519728.003909, [], [1, 2, 3, 4, 5, 6]), + (1720567616.396835, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720244626.143033, [], [1, 2, 3, 4, 5, 6]), + (1720408635.777109, [], [1, 2, 3, 4, 5, 6]), + (1720575490.310245, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720650559.825556, [], [1, 2, 3, 4, 5, 6])], + [(1720582328.399682, [], [1, 2, 3, 4, 5, 6])], + [(1720394085.218172, [], [1, 2, 3, 4, 5, 6])], + [ + (1720066065.444143, [], [1, 2, 3, 4, 5, 6]), + (1720191993.66672, [], [1, 2, 3, 4, 5, 6]), + (1720491329.586545, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720071639.073353, [], [1, 2, 3, 4, 5, 6]), + (1720147406.207381, [], [1, 2, 3, 4, 5, 6]), + (1720311129.234658, [], [1, 2, 3, 4, 5, 6]), + (1720573354.037576, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720240131.733437, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984704.452873, [], [1, 2, 3, 4, 5, 6]), + (1720154844.587696, [], [1, 2, 3, 4, 5, 6]), + (1720322613.231449, [], [1, 2, 3, 4, 5, 6]), + (1720569714.825725, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720486946.256934, [], [1, 2, 3, 4, 5, 6]), (1720647691.877446, [], [1, 2, 3, 4, 5, 6])], + [ + (1719993805.960143, [], [1, 2, 3, 4, 5, 6]), + (1720143738.25635, [], [1, 2, 3, 4, 5, 6]), + (1720309437.19401, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720398437.265534, [], [1, 2, 3, 4, 5, 6]), + (1720509793.976335, [], [1, 2, 3, 4, 5, 6]), + (1720629661.586274, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720062670.596985, [], [1, 2, 3, 4, 5, 6]), (1720438472.029491, [], [1, 2, 3, 4, 5, 6])], + [(1720329032.038648, [], [1, 2, 3, 4, 5, 6])], + [(1719984110.370743, [], [1, 2, 3, 4, 5, 6])], + [(1719986035.664892, [], [1, 2, 3, 4, 5, 6]), (1720241225.374801, [], [1, 2, 3, 4, 5, 6])], + [ + (1720105075.810739, [], [1, 2, 3, 4, 5, 6]), + (1720150414.362845, [], [1, 2, 3, 4, 5, 6]), + (1720308174.216706, [], [1, 2, 3, 4, 5, 6]), + (1720412581.759663, [], [1, 2, 3, 4, 5, 6]), + (1720567554.209717, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720537016.009206, [], [1, 2, 3, 4, 5, 6]), (1720589963.468469, [], [1, 2, 3, 4, 5, 6])], + [(1720487391.820061, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975756.8334, [], [1, 2, 3, 4, 5, 6]), + (1720155078.652264, [], [1, 2, 3, 4, 5, 6]), + (1720400698.199527, [], [1, 2, 3, 4, 5, 6]), + (1720496832.791723, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720018681.490249, [], [1, 2, 3, 4, 5, 6]), + (1720136925.765051, [], [1, 2, 3, 4, 5, 6]), + (1720299976.82238, [], [1, 2, 3, 4, 5, 6]), + (1720569027.317754, [], [1, 2, 3, 4, 5, 6]), + 
], + [ + (1720154725.361924, [], [1, 2, 3, 4, 5, 6]), + (1720243905.535885, [], [1, 2, 3, 4, 5, 6]), + (1720315512.512864, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720588095.521256, [], [1, 2, 3, 4, 5, 6])], + [(1719983442.727069, [], [1, 2, 3, 4, 5, 6]), (1720152453.391462, [], [1, 2, 3, 4, 5, 6])], + [(1720405182.158931, [], [1, 2, 3, 4, 5, 6]), (1720575807.583852, [], [1, 2, 3, 4, 5, 6])], + [(1720069878.030512, [], [1, 2, 3, 4, 5, 6]), (1720168504.534482, [], [1, 2, 3, 4, 5, 6])], + [(1720239119.238416, [], [1, 2, 3, 4, 5, 6])], + [(1720576430.211377, [], [1, 2, 3, 4, 5, 6])], + [ + (1719972965.48613, [], [1, 2, 3, 4, 5, 6]), + (1720240976.867243, [], [1, 2, 3, 4, 5, 6]), + (1720404977.394327, [], [1, 2, 3, 4, 5, 6]), + (1720537004.359466, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720487019.528315, [], [1, 2, 3, 4, 5, 6]), (1720581959.239135, [], [1, 2, 3, 4, 5, 6])], + [(1720652926.990055, [], [1, 2, 3, 4, 5, 6])], + [ + (1720052099.960777, [], [1, 2, 3, 4, 5, 6]), + (1720138855.98453, [], [1, 2, 3, 4, 5, 6]), + (1720138921.586511, [], [1, 2, 3, 4, 5, 6]), + (1720139014.744606, [], [1, 2, 3, 4, 5, 6]), + (1720139182.595009, [], [1, 2, 3, 4, 5, 6]), + (1720139192.3206, [], [1, 2, 3, 4, 5, 6]), + (1720581909.908771, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720065955.899244, [], [1, 2, 3, 4, 5, 6]), + (1720236828.579322, [], [1, 2, 3, 4, 5, 6]), + (1720308640.597753, [], [1, 2, 3, 4, 5, 6]), + (1720579634.738256, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719980159.276761, [], [1, 2, 3, 4, 5, 6]), + (1720308552.424302, [], [1, 2, 3, 4, 5, 6]), + (1720653256.063729, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719985271.443965, [], [1, 2, 3, 4, 5, 6]), + (1720220543.115385, [], [1, 2, 3, 4, 5, 6]), + (1720315297.143816, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720330015.747888, [], [1, 2, 3, 4, 5, 6]), (1720481610.656596, [], [1, 2, 3, 4, 5, 6])], + [(1720224041.283015, [], [1, 2, 3, 4, 5, 6]), (1720567371.834809, [], [1, 2, 3, 4, 5, 6])], + [ + (1720244217.827624, [], [1, 2, 3, 4, 5, 6]), + (1720402557.505715, [], [1, 2, 3, 4, 5, 6]), + (1720502124.284452, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720415547.576014, [], [1, 2, 3, 4, 5, 6]), (1720531682.711315, [], [1, 2, 3, 4, 5, 6])], + [(1720407411.272993, [], [1, 2, 3, 4, 5, 6]), (1720574508.629738, [], [1, 2, 3, 4, 5, 6])], + [(1720257290.163002, [], [1, 2, 3, 4, 5, 6]), (1720492975.717018, [], [1, 2, 3, 4, 5, 6])], + [(1720144145.711511, [], [1, 2, 3, 4, 5, 6]), (1720401163.125022, [], [1, 2, 3, 4, 5, 6])], + [(1720355601.346282, [], [1, 2, 3, 4, 5, 6]), (1720572069.286814, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968339.260056, [], [1, 2, 3, 4, 5, 6]), + (1719968341.365428, [], [1, 2, 3, 4, 5, 6]), + (1719968343.993978, [], [1, 2, 3, 4, 5, 6]), + (1719968346.031381, [], [1, 2, 3, 4, 5, 6]), + (1719968349.431552, [], [1, 2, 3, 4, 5, 6]), + (1719968351.559689, [], [1, 2, 3, 4, 5, 6]), + (1719976134.941126, [], [1, 2, 3, 4, 5, 6]), + (1719976278.477066, [], [1, 2, 3, 4, 5, 6]), + (1719976280.56988, [], [1, 2, 3, 4, 5, 6]), + (1720052757.855887, [], [1, 2, 3, 4, 5, 6]), + (1720052759.915085, [], [1, 2, 3, 4, 5, 6]), + (1720110094.313929, [], [1, 2, 3, 4, 5, 6]), + (1720142517.707832, [], [1, 2, 3, 4, 5, 6]), + (1720142570.10559, [], [1, 2, 3, 4, 5, 6]), + (1720142572.151412, [], [1, 2, 3, 4, 5, 6]), + (1720142576.179553, [], [1, 2, 3, 4, 5, 6]), + (1720237055.807105, [], [1, 2, 3, 4, 5, 6]), + (1720237203.321556, [], [1, 2, 3, 4, 5, 6]), + (1720237205.419793, [], [1, 2, 3, 4, 5, 6]), + (1720316912.566247, [], [1, 2, 3, 4, 5, 6]), + (1720317055.804333, [], [1, 2, 3, 4, 5, 6]), + 
(1720317057.925258, [], [1, 2, 3, 4, 5, 6]), + (1720317058.399149, [], [1, 2, 3, 4, 5, 6]), + (1720317060.481448, [], [1, 2, 3, 4, 5, 6]), + (1720393849.766518, [], [1, 2, 3, 4, 5, 6]), + (1720393921.300236, [], [1, 2, 3, 4, 5, 6]), + (1720406796.853939, [], [1, 2, 3, 4, 5, 6]), + (1720406798.933918, [], [1, 2, 3, 4, 5, 6]), + (1720482599.505433, [], [1, 2, 3, 4, 5, 6]), + (1720482663.255581, [], [1, 2, 3, 4, 5, 6]), + (1720482665.27704, [], [1, 2, 3, 4, 5, 6]), + (1720492023.699542, [], [1, 2, 3, 4, 5, 6]), + (1720492025.737059, [], [1, 2, 3, 4, 5, 6]), + (1720500142.609638, [], [1, 2, 3, 4, 5, 6]), + (1720500250.895423, [], [1, 2, 3, 4, 5, 6]), + (1720525828.241699, [], [1, 2, 3, 4, 5, 6]), + (1720525830.335737, [], [1, 2, 3, 4, 5, 6]), + (1720543473.185403, [], [1, 2, 3, 4, 5, 6]), + (1720543629.193018, [], [1, 2, 3, 4, 5, 6]), + (1720543631.258205, [], [1, 2, 3, 4, 5, 6]), + (1720566115.315069, [], [1, 2, 3, 4, 5, 6]), + (1720566235.252146, [], [1, 2, 3, 4, 5, 6]), + (1720566237.371673, [], [1, 2, 3, 4, 5, 6]), + (1720566239.622085, [], [1, 2, 3, 4, 5, 6]), + (1720566241.74061, [], [1, 2, 3, 4, 5, 6]), + (1720652422.12376, [], [1, 2, 3, 4, 5, 6]), + (1720652589.161105, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720332940.235911, [], [1, 2, 3, 4, 5, 6])], + [(1720065527.859172, [], [1, 2, 3, 4, 5, 6])], + [(1720568368.543876, [], [1, 2, 3, 4, 5, 6]), (1720635472.219669, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968828.538353, [], [1, 2, 3, 4, 5, 6]), + (1720051948.377763, [], [1, 2, 3, 4, 5, 6]), + (1720299205.556357, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720235830.179481, [], [1, 2, 3, 4, 5, 6]), + (1720235953.147018, [], [1, 2, 3, 4, 5, 6]), + (1720236018.20472, [], [1, 2, 3, 4, 5, 6]), + (1720236089.138704, [], [1, 2, 3, 4, 5, 6]), + (1720236119.593712, [], [1, 2, 3, 4, 5, 6]), + (1720236141.83499, [], [1, 2, 3, 4, 5, 6]), + (1720311050.201652, [], [1, 2, 3, 4, 5, 6]), + (1720311052.280309, [], [1, 2, 3, 4, 5, 6]), + (1720395484.534496, [], [1, 2, 3, 4, 5, 6]), + (1720491406.080018, [], [1, 2, 3, 4, 5, 6]), + (1720491430.598198, [], [1, 2, 3, 4, 5, 6]), + (1720491432.661821, [], [1, 2, 3, 4, 5, 6]), + (1720572678.481313, [], [1, 2, 3, 4, 5, 6]), + (1720572808.45491, [], [1, 2, 3, 4, 5, 6]), + (1720572810.563889, [], [1, 2, 3, 4, 5, 6]), + (1720603175.70942, [], [1, 2, 3, 4, 5, 6]), + (1720603202.06502, [], [1, 2, 3, 4, 5, 6]), + (1720603204.156746, [], [1, 2, 3, 4, 5, 6]), + (1720652491.405509, [], [1, 2, 3, 4, 5, 6]), + (1720652598.039059, [], [1, 2, 3, 4, 5, 6]), + (1720652600.082367, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720153161.725292, [], [1, 2, 3, 4, 5, 6])], + [(1720494662.408865, [], [1, 2, 3, 4, 5, 6]), (1720568597.855422, [], [1, 2, 3, 4, 5, 6])], + [(1720416466.753208, [], [1, 2, 3, 4, 5, 6])], + [(1719978991.390168, [], [1, 2, 3, 4, 5, 6])], + [(1720072031.976782, [], [1, 2, 3, 4, 5, 6]), (1720584690.251602, [], [1, 2, 3, 4, 5, 6])], + [(1720575076.950008, [], [1, 2, 3, 4, 5, 6])], + [(1720231712.798613, [], [1, 2, 3, 4, 5, 6]), (1720398045.987903, [], [1, 2, 3, 4, 5, 6])], + [(1720056840.047309, [], [1, 2, 3, 4, 5, 6]), (1720625851.477544, [], [1, 2, 3, 4, 5, 6])], + [ + (1720053429.359158, [], [1, 2, 3, 4, 5, 6]), + (1720053508.29626, [], [1, 2, 3, 4, 5, 6]), + (1720053570.605172, [], [1, 2, 3, 4, 5, 6]), + (1720053639.380777, [], [1, 2, 3, 4, 5, 6]), + (1720066181.00997, [], [1, 2, 3, 4, 5, 6]), + (1720066215.053405, [], [1, 2, 3, 4, 5, 6]), + (1720066217.116517, [], [1, 2, 3, 4, 5, 6]), + (1720143779.66573, [], [1, 2, 3, 4, 5, 6]), + (1720143781.713526, [], [1, 2, 3, 4, 5, 6]), + 
(1720222105.35254, [], [1, 2, 3, 4, 5, 6]), + (1720317654.056711, [], [1, 2, 3, 4, 5, 6]), + (1720317708.563828, [], [1, 2, 3, 4, 5, 6]), + (1720480329.549535, [], [1, 2, 3, 4, 5, 6]), + (1720480520.417693, [], [1, 2, 3, 4, 5, 6]), + (1720480522.54519, [], [1, 2, 3, 4, 5, 6]), + (1720480523.499363, [], [1, 2, 3, 4, 5, 6]), + (1720577037.242221, [], [1, 2, 3, 4, 5, 6]), + (1720577039.306434, [], [1, 2, 3, 4, 5, 6]), + (1720639329.717862, [], [1, 2, 3, 4, 5, 6]), + (1720639469.331454, [], [1, 2, 3, 4, 5, 6]), + (1720639471.36127, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720024453.629276, [], [1, 2, 3, 4, 5, 6]), (1720316176.77774, [], [1, 2, 3, 4, 5, 6])], + [ + (1720068883.919311, [], [1, 2, 3, 4, 5, 6]), + (1720319773.101818, [], [1, 2, 3, 4, 5, 6]), + (1720586957.747953, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720056927.404781, [], [1, 2, 3, 4, 5, 6])], + [ + (1720054270.638059, [], [1, 2, 3, 4, 5, 6]), + (1720153565.870327, [], [1, 2, 3, 4, 5, 6]), + (1720334693.538652, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720152641.309698, [], [1, 2, 3, 4, 5, 6]), (1720576318.23299, [], [1, 2, 3, 4, 5, 6])], + [ + (1720404934.034946, [], [1, 2, 3, 4, 5, 6]), + (1720476937.980269, [], [1, 2, 3, 4, 5, 6]), + (1720652925.317718, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720583121.659416, [], [1, 2, 3, 4, 5, 6])], + [(1719974918.036807, [], [1, 2, 3, 4, 5, 6])], + [(1720052839.789235, [], [1, 2, 3, 4, 5, 6]), (1720309756.887443, [], [1, 2, 3, 4, 5, 6])], + [(1720214816.601474, [], [1, 2, 3, 4, 5, 6]), (1720628643.350003, [], [1, 2, 3, 4, 5, 6])], + [(1719990813.089809, [], [1, 2, 3, 4, 5, 6]), (1720312746.860016, [], [1, 2, 3, 4, 5, 6])], + [(1720072040.294779, [], [1, 2, 3, 4, 5, 6]), (1720573666.820699, [], [1, 2, 3, 4, 5, 6])], + [(1720221192.01312, [], [1, 2, 3, 4, 5, 6])], + [(1720143165.437476, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965593.95106, [], [1, 2, 3, 4, 5, 6]), + (1720062861.422969, [], [1, 2, 3, 4, 5, 6]), + (1720503564.270709, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720310060.645551, [], [1, 2, 3, 4, 5, 6]), + (1720482309.217878, [], [1, 2, 3, 4, 5, 6]), + (1720591036.349001, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720157768.51272, [], [1, 2, 3, 4, 5, 6])], + [(1720149445.473387, [], [1, 2, 3, 4, 5, 6]), (1720438026.869011, [], [1, 2, 3, 4, 5, 6])], + [ + (1720060489.013219, [], [1, 2, 3, 4, 5, 6]), + (1720415886.629529, [], [1, 2, 3, 4, 5, 6]), + (1720580867.871164, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720414273.276015, [], [1, 2, 3, 4, 5, 6]), (1720487097.033528, [], [1, 2, 3, 4, 5, 6])], + [(1719966569.062726, [], [1, 2, 3, 4, 5, 6])], + [ + (1719966533.368239, [], [1, 2, 3, 4, 5, 6]), + (1720318867.369239, [], [1, 2, 3, 4, 5, 6]), + (1720424434.84027, [], [1, 2, 3, 4, 5, 6]), + (1720566607.607309, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720608001.068576, [], [1, 2, 3, 4, 5, 6])], + [ + (1720062804.379267, [], [1, 2, 3, 4, 5, 6]), + (1720226534.49236, [], [1, 2, 3, 4, 5, 6]), + (1720321084.499585, [], [1, 2, 3, 4, 5, 6]), + (1720450358.303395, [], [1, 2, 3, 4, 5, 6]), + (1720577114.020932, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720138095.209939, [], [1, 2, 3, 4, 5, 6])], + [ + (1720109654.682718, [], [1, 2, 3, 4, 5, 6]), + (1720109832.815741, [], [1, 2, 3, 4, 5, 6]), + (1720109959.88633, [], [1, 2, 3, 4, 5, 6]), + (1720110033.900336, [], [1, 2, 3, 4, 5, 6]), + (1720110090.159457, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719973270.54798, [], [1, 2, 3, 4, 5, 6])], + [(1720404747.93651, [], [1, 2, 3, 4, 5, 6])], + [(1719975896.00888, [], [1, 2, 3, 4, 5, 6]), (1720412877.994145, [], [1, 2, 3, 4, 5, 6])], + [(1720488415.324703, [], [1, 
2, 3, 4, 5, 6])], + [ + (1719979329.168652, [], [1, 2, 3, 4, 5, 6]), + (1719979433.834943, [], [1, 2, 3, 4, 5, 6]), + (1719979537.706541, [], [1, 2, 3, 4, 5, 6]), + (1720054525.517489, [], [1, 2, 3, 4, 5, 6]), + (1720054576.808031, [], [1, 2, 3, 4, 5, 6]), + (1720054637.310552, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720008269.86362, [], [1, 2, 3, 4, 5, 6]), (1720228186.690288, [], [1, 2, 3, 4, 5, 6])], + [(1719980906.390651, [], [1, 2, 3, 4, 5, 6])], + [(1720049804.519108, [], [1, 2, 3, 4, 5, 6])], + [(1719982824.989151, [], [1, 2, 3, 4, 5, 6])], + [ + (1720052793.231176, [], [1, 2, 3, 4, 5, 6]), + (1720416383.522419, [], [1, 2, 3, 4, 5, 6]), + (1720576203.462386, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720141956.096351, [], [1, 2, 3, 4, 5, 6]), (1720501761.653384, [], [1, 2, 3, 4, 5, 6])], + [(1719968214.670337, [], [1, 2, 3, 4, 5, 6])], + [ + (1720061582.741936, [], [1, 2, 3, 4, 5, 6]), + (1720148352.805998, [], [1, 2, 3, 4, 5, 6]), + (1720320650.836088, [], [1, 2, 3, 4, 5, 6]), + (1720480540.757287, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720066746.296404, [], [1, 2, 3, 4, 5, 6]), (1720470635.779369, [], [1, 2, 3, 4, 5, 6])], + [(1720036143.99876, [], [1, 2, 3, 4, 5, 6]), (1720235115.25383, [], [1, 2, 3, 4, 5, 6])], + [ + (1720325739.301864, [], [1, 2, 3, 4, 5, 6]), + (1720366758.48691, [], [1, 2, 3, 4, 5, 6]), + (1720579671.285769, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720406674.612278, [], [1, 2, 3, 4, 5, 6])], + [(1720569843.609239, [], [1, 2, 3, 4, 5, 6])], + [ + (1720060843.491976, [], [1, 2, 3, 4, 5, 6]), + (1720147665.305258, [], [1, 2, 3, 4, 5, 6]), + (1720501529.904655, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720046373.265399, [], [1, 2, 3, 4, 5, 6]), + (1720046375.332994, [], [1, 2, 3, 4, 5, 6]), + (1720068144.411279, [], [1, 2, 3, 4, 5, 6]), + (1720068261.195225, [], [1, 2, 3, 4, 5, 6]), + (1720131629.331825, [], [1, 2, 3, 4, 5, 6]), + (1720131717.731289, [], [1, 2, 3, 4, 5, 6]), + (1720143208.108341, [], [1, 2, 3, 4, 5, 6]), + (1720224375.336718, [], [1, 2, 3, 4, 5, 6]), + (1720224489.89131, [], [1, 2, 3, 4, 5, 6]), + (1720239983.991454, [], [1, 2, 3, 4, 5, 6]), + (1720240023.957522, [], [1, 2, 3, 4, 5, 6]), + (1720240026.023994, [], [1, 2, 3, 4, 5, 6]), + (1720288870.449116, [], [1, 2, 3, 4, 5, 6]), + (1720319014.330473, [], [1, 2, 3, 4, 5, 6]), + (1720319153.071162, [], [1, 2, 3, 4, 5, 6]), + (1720319155.11854, [], [1, 2, 3, 4, 5, 6]), + (1720356008.754634, [], [1, 2, 3, 4, 5, 6]), + (1720356170.017209, [], [1, 2, 3, 4, 5, 6]), + (1720414281.753569, [], [1, 2, 3, 4, 5, 6]), + (1720466888.126284, [], [1, 2, 3, 4, 5, 6]), + (1720466890.175399, [], [1, 2, 3, 4, 5, 6]), + (1720496852.884055, [], [1, 2, 3, 4, 5, 6]), + (1720496900.157534, [], [1, 2, 3, 4, 5, 6]), + (1720496902.257177, [], [1, 2, 3, 4, 5, 6]), + (1720517711.484252, [], [1, 2, 3, 4, 5, 6]), + (1720517785.617389, [], [1, 2, 3, 4, 5, 6]), + (1720517787.722386, [], [1, 2, 3, 4, 5, 6]), + (1720574636.301281, [], [1, 2, 3, 4, 5, 6]), + (1720574638.402501, [], [1, 2, 3, 4, 5, 6]), + (1720631962.467861, [], [1, 2, 3, 4, 5, 6]), + (1720632073.350096, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720222983.951049, [], [1, 2, 3, 4, 5, 6]), (1720416117.193584, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971140.695348, [], [1, 2, 3, 4, 5, 6]), + (1720135102.372106, [], [1, 2, 3, 4, 5, 6]), + (1720241528.560118, [], [1, 2, 3, 4, 5, 6]), + (1720494221.442123, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720321074.976216, [], [1, 2, 3, 4, 5, 6]), (1720409891.326419, [], [1, 2, 3, 4, 5, 6])], + [ + (1720048681.074873, [], [1, 2, 3, 4, 5, 6]), + (1720048799.279747, [], [1, 2, 
3, 4, 5, 6]), + (1720048801.388645, [], [1, 2, 3, 4, 5, 6]), + (1720078114.283264, [], [1, 2, 3, 4, 5, 6]), + (1720078331.228227, [], [1, 2, 3, 4, 5, 6]), + (1720143813.123392, [], [1, 2, 3, 4, 5, 6]), + (1720143818.771163, [], [1, 2, 3, 4, 5, 6]), + (1720235193.521271, [], [1, 2, 3, 4, 5, 6]), + (1720235195.596896, [], [1, 2, 3, 4, 5, 6]), + (1720325140.395885, [], [1, 2, 3, 4, 5, 6]), + (1720325267.478348, [], [1, 2, 3, 4, 5, 6]), + (1720411316.426439, [], [1, 2, 3, 4, 5, 6]), + (1720411410.991238, [], [1, 2, 3, 4, 5, 6]), + (1720411413.049352, [], [1, 2, 3, 4, 5, 6]), + (1720585972.027756, [], [1, 2, 3, 4, 5, 6]), + (1720586043.355429, [], [1, 2, 3, 4, 5, 6]), + (1720586045.457795, [], [1, 2, 3, 4, 5, 6]), + (1720615162.541609, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719966754.275194, [], [1, 2, 3, 4, 5, 6]), + (1720490216.464205, [], [1, 2, 3, 4, 5, 6]), + (1720553382.681907, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720002644.294528, [], [1, 2, 3, 4, 5, 6]), (1720230090.842106, [], [1, 2, 3, 4, 5, 6])], + [(1720146769.696788, [], [1, 2, 3, 4, 5, 6])], + [ + (1720165756.310512, [], [1, 2, 3, 4, 5, 6]), + (1720410912.566749, [], [1, 2, 3, 4, 5, 6]), + (1720570647.832366, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719976353.576494, [], [1, 2, 3, 4, 5, 6]), + (1720377185.297147, [], [1, 2, 3, 4, 5, 6]), + (1720377289.215042, [], [1, 2, 3, 4, 5, 6]), + (1720377308.468995, [], [1, 2, 3, 4, 5, 6]), + (1720377428.407697, [], [1, 2, 3, 4, 5, 6]), + (1720377485.735576, [], [1, 2, 3, 4, 5, 6]), + (1720377529.508166, [], [1, 2, 3, 4, 5, 6]), + (1720377719.383399, [], [1, 2, 3, 4, 5, 6]), + (1720377809.666048, [], [1, 2, 3, 4, 5, 6]), + (1720377903.918773, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719980213.693083, [], [1, 2, 3, 4, 5, 6]), (1720140384.252931, [], [1, 2, 3, 4, 5, 6])], + [(1720063871.378525, [], [1, 2, 3, 4, 5, 6]), (1720573199.935932, [], [1, 2, 3, 4, 5, 6])], + [(1719985832.719319, [], [1, 2, 3, 4, 5, 6]), (1720406386.34727, [], [1, 2, 3, 4, 5, 6])], + [(1720235695.246766, [], [1, 2, 3, 4, 5, 6]), (1720397629.747797, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968874.938189, [], [1, 2, 3, 4, 5, 6]), + (1719969009.652547, [], [1, 2, 3, 4, 5, 6]), + (1719985700.913806, [], [1, 2, 3, 4, 5, 6]), + (1719985791.512554, [], [1, 2, 3, 4, 5, 6]), + (1720148300.05719, [], [1, 2, 3, 4, 5, 6]), + (1720148304.627225, [], [1, 2, 3, 4, 5, 6]), + (1720148306.685703, [], [1, 2, 3, 4, 5, 6]), + (1720227741.213642, [], [1, 2, 3, 4, 5, 6]), + (1720227808.775173, [], [1, 2, 3, 4, 5, 6]), + (1720295172.811284, [], [1, 2, 3, 4, 5, 6]), + (1720295262.745855, [], [1, 2, 3, 4, 5, 6]), + (1720295264.827116, [], [1, 2, 3, 4, 5, 6]), + (1720295269.130924, [], [1, 2, 3, 4, 5, 6]), + (1720295271.214758, [], [1, 2, 3, 4, 5, 6]), + (1720295276.000757, [], [1, 2, 3, 4, 5, 6]), + (1720295278.050173, [], [1, 2, 3, 4, 5, 6]), + (1720295281.951474, [], [1, 2, 3, 4, 5, 6]), + (1720314791.722567, [], [1, 2, 3, 4, 5, 6]), + (1720314793.809493, [], [1, 2, 3, 4, 5, 6]), + (1720314794.091414, [], [1, 2, 3, 4, 5, 6]), + (1720314796.180282, [], [1, 2, 3, 4, 5, 6]), + (1720334621.693568, [], [1, 2, 3, 4, 5, 6]), + (1720334651.208509, [], [1, 2, 3, 4, 5, 6]), + (1720334653.289286, [], [1, 2, 3, 4, 5, 6]), + (1720405990.429231, [], [1, 2, 3, 4, 5, 6]), + (1720406072.368399, [], [1, 2, 3, 4, 5, 6]), + (1720406074.479232, [], [1, 2, 3, 4, 5, 6]), + (1720502938.778116, [], [1, 2, 3, 4, 5, 6]), + (1720502940.865312, [], [1, 2, 3, 4, 5, 6]), + (1720575136.293441, [], [1, 2, 3, 4, 5, 6]), + (1720575193.500487, [], [1, 2, 3, 4, 5, 6]), + (1720575195.593287, [], 
[1, 2, 3, 4, 5, 6]), + (1720584066.099625, [], [1, 2, 3, 4, 5, 6]), + (1720584068.181406, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720401234.664166, [], [1, 2, 3, 4, 5, 6])], + [(1720338995.593562, [], [1, 2, 3, 4, 5, 6]), (1720569421.058122, [], [1, 2, 3, 4, 5, 6])], + [ + (1720152780.103003, [], [1, 2, 3, 4, 5, 6]), + (1720318021.21162, [], [1, 2, 3, 4, 5, 6]), + (1720568682.771219, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720153767.474128, [], [1, 2, 3, 4, 5, 6]), + (1720153989.99445, [], [1, 2, 3, 4, 5, 6]), + (1720154117.339532, [], [1, 2, 3, 4, 5, 6]), + (1720154188.47243, [], [1, 2, 3, 4, 5, 6]), + (1720227169.047901, [], [1, 2, 3, 4, 5, 6]), + (1720227171.113334, [], [1, 2, 3, 4, 5, 6]), + (1720290328.719511, [], [1, 2, 3, 4, 5, 6]), + (1720290370.18464, [], [1, 2, 3, 4, 5, 6]), + (1720308949.583306, [], [1, 2, 3, 4, 5, 6]), + (1720309021.205367, [], [1, 2, 3, 4, 5, 6]), + (1720309023.255742, [], [1, 2, 3, 4, 5, 6]), + (1720400031.238045, [], [1, 2, 3, 4, 5, 6]), + (1720400050.534361, [], [1, 2, 3, 4, 5, 6]), + (1720400052.591865, [], [1, 2, 3, 4, 5, 6]), + (1720475517.057002, [], [1, 2, 3, 4, 5, 6]), + (1720475631.221119, [], [1, 2, 3, 4, 5, 6]), + (1720489341.891834, [], [1, 2, 3, 4, 5, 6]), + (1720489520.813888, [], [1, 2, 3, 4, 5, 6]), + (1720522911.41822, [], [1, 2, 3, 4, 5, 6]), + (1720522980.415637, [], [1, 2, 3, 4, 5, 6]), + (1720522982.44762, [], [1, 2, 3, 4, 5, 6]), + (1720590435.585175, [], [1, 2, 3, 4, 5, 6]), + (1720590601.259611, [], [1, 2, 3, 4, 5, 6]), + (1720590603.315457, [], [1, 2, 3, 4, 5, 6]), + (1720590604.638539, [], [1, 2, 3, 4, 5, 6]), + (1720590606.730642, [], [1, 2, 3, 4, 5, 6]), + (1720612053.860624, [], [1, 2, 3, 4, 5, 6]), + (1720612129.921877, [], [1, 2, 3, 4, 5, 6]), + (1720612132.011818, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720426949.173587, [], [1, 2, 3, 4, 5, 6])], + [(1720569102.034496, [], [1, 2, 3, 4, 5, 6])], + [(1720034790.744638, [], [1, 2, 3, 4, 5, 6]), (1720307075.973997, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226287.705985, [], [1, 2, 3, 4, 5, 6]), + (1720397684.607266, [], [1, 2, 3, 4, 5, 6]), + (1720488997.884315, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720228730.18975, [], [1, 2, 3, 4, 5, 6]), + (1720407495.769529, [], [1, 2, 3, 4, 5, 6]), + (1720486995.921451, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720489080.310875, [], [1, 2, 3, 4, 5, 6]), (1720579311.992295, [], [1, 2, 3, 4, 5, 6])], + [(1720138007.433538, [], [1, 2, 3, 4, 5, 6]), (1720243741.609597, [], [1, 2, 3, 4, 5, 6])], + [(1720528666.459324, [], [1, 2, 3, 4, 5, 6])], + [(1719970772.701878, [], [1, 2, 3, 4, 5, 6]), (1720141632.061506, [], [1, 2, 3, 4, 5, 6])], + [(1720068110.038208, [], [1, 2, 3, 4, 5, 6])], + [(1720047191.032235, [], [1, 2, 3, 4, 5, 6])], + [(1719976436.118248, [], [1, 2, 3, 4, 5, 6]), (1720307037.853977, [], [1, 2, 3, 4, 5, 6])], + [(1719972036.639217, [], [1, 2, 3, 4, 5, 6]), (1720057689.829017, [], [1, 2, 3, 4, 5, 6])], + [(1720110461.39165, [], [1, 2, 3, 4, 5, 6]), (1720507249.36072, [], [1, 2, 3, 4, 5, 6])], + [(1719973197.847086, [], [1, 2, 3, 4, 5, 6])], + [ + (1720069338.721539, [], [1, 2, 3, 4, 5, 6]), + (1720524537.017222, [], [1, 2, 3, 4, 5, 6]), + (1720603176.268707, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720142471.621654, [], [1, 2, 3, 4, 5, 6]), (1720221877.173433, [], [1, 2, 3, 4, 5, 6])], + [(1720141771.947396, [], [1, 2, 3, 4, 5, 6])], + [(1720563222.366935, [], [1, 2, 3, 4, 5, 6])], + [(1720237444.558492, [], [1, 2, 3, 4, 5, 6]), (1720586951.821255, [], [1, 2, 3, 4, 5, 6])], + [ + (1720066787.226665, [], [1, 2, 3, 4, 5, 6]), + (1720138501.376918, [], [1, 2, 3, 4, 
5, 6]), + (1720332897.490345, [], [1, 2, 3, 4, 5, 6]), + (1720498267.095353, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719979972.999784, [], [1, 2, 3, 4, 5, 6]), (1720326547.318274, [], [1, 2, 3, 4, 5, 6])], + [(1720156142.458551, [], [1, 2, 3, 4, 5, 6]), (1720222674.830191, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976247.399205, [], [1, 2, 3, 4, 5, 6]), + (1719976421.138728, [], [1, 2, 3, 4, 5, 6]), + (1719976457.121053, [], [1, 2, 3, 4, 5, 6]), + (1719976566.522486, [], [1, 2, 3, 4, 5, 6]), + (1720060116.807539, [], [1, 2, 3, 4, 5, 6]), + (1720142201.528128, [], [1, 2, 3, 4, 5, 6]), + (1720142320.790244, [], [1, 2, 3, 4, 5, 6]), + (1720142322.873716, [], [1, 2, 3, 4, 5, 6]), + (1720142323.209429, [], [1, 2, 3, 4, 5, 6]), + (1720142325.32365, [], [1, 2, 3, 4, 5, 6]), + (1720158309.47212, [], [1, 2, 3, 4, 5, 6]), + (1720158464.699924, [], [1, 2, 3, 4, 5, 6]), + (1720158466.803077, [], [1, 2, 3, 4, 5, 6]), + (1720235827.780639, [], [1, 2, 3, 4, 5, 6]), + (1720235829.873017, [], [1, 2, 3, 4, 5, 6]), + (1720235831.516786, [], [1, 2, 3, 4, 5, 6]), + (1720235833.64015, [], [1, 2, 3, 4, 5, 6]), + (1720308111.792929, [], [1, 2, 3, 4, 5, 6]), + (1720308113.917634, [], [1, 2, 3, 4, 5, 6]), + (1720330424.153222, [], [1, 2, 3, 4, 5, 6]), + (1720330426.274619, [], [1, 2, 3, 4, 5, 6]), + (1720397440.529792, [], [1, 2, 3, 4, 5, 6]), + (1720397517.527169, [], [1, 2, 3, 4, 5, 6]), + (1720397519.567891, [], [1, 2, 3, 4, 5, 6]), + (1720489794.692916, [], [1, 2, 3, 4, 5, 6]), + (1720489888.559008, [], [1, 2, 3, 4, 5, 6]), + (1720489890.678539, [], [1, 2, 3, 4, 5, 6]), + (1720577363.385966, [], [1, 2, 3, 4, 5, 6]), + (1720577493.034855, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720042932.668232, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059740.224971, [], [1, 2, 3, 4, 5, 6]), + (1720141837.883794, [], [1, 2, 3, 4, 5, 6]), + (1720405792.751871, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720050443.838424, [], [1, 2, 3, 4, 5, 6]), (1720124719.146549, [], [1, 2, 3, 4, 5, 6])], + [(1719974887.67444, [], [1, 2, 3, 4, 5, 6]), (1720568466.68215, [], [1, 2, 3, 4, 5, 6])], + [(1720065592.314345, [], [1, 2, 3, 4, 5, 6]), (1720205463.888972, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226332.701569, [], [1, 2, 3, 4, 5, 6]), + (1720396620.155135, [], [1, 2, 3, 4, 5, 6]), + (1720492327.218299, [], [1, 2, 3, 4, 5, 6]), + (1720574416.447233, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720045016.618738, [], [1, 2, 3, 4, 5, 6])], + [ + (1720041923.403398, [], [1, 2, 3, 4, 5, 6]), + (1720041925.529856, [], [1, 2, 3, 4, 5, 6]), + (1720041929.344695, [], [1, 2, 3, 4, 5, 6]), + (1720071082.001532, [], [1, 2, 3, 4, 5, 6]), + (1720071201.167727, [], [1, 2, 3, 4, 5, 6]), + (1720071203.259989, [], [1, 2, 3, 4, 5, 6]), + (1720113938.478909, [], [1, 2, 3, 4, 5, 6]), + (1720114019.141219, [], [1, 2, 3, 4, 5, 6]), + (1720147954.114345, [], [1, 2, 3, 4, 5, 6]), + (1720148058.186186, [], [1, 2, 3, 4, 5, 6]), + (1720148060.244565, [], [1, 2, 3, 4, 5, 6]), + (1720230463.28606, [], [1, 2, 3, 4, 5, 6]), + (1720230465.339869, [], [1, 2, 3, 4, 5, 6]), + (1720306384.513301, [], [1, 2, 3, 4, 5, 6]), + (1720306386.603107, [], [1, 2, 3, 4, 5, 6]), + (1720327575.975525, [], [1, 2, 3, 4, 5, 6]), + (1720327821.751969, [], [1, 2, 3, 4, 5, 6]), + (1720327823.781901, [], [1, 2, 3, 4, 5, 6]), + (1720410348.159738, [], [1, 2, 3, 4, 5, 6]), + (1720410448.341114, [], [1, 2, 3, 4, 5, 6]), + (1720457570.237639, [], [1, 2, 3, 4, 5, 6]), + (1720457731.119754, [], [1, 2, 3, 4, 5, 6]), + (1720457733.248545, [], [1, 2, 3, 4, 5, 6]), + (1720499264.385485, [], [1, 2, 3, 4, 5, 6]), + (1720499470.033411, [], [1, 2, 
3, 4, 5, 6]), + (1720499472.085357, [], [1, 2, 3, 4, 5, 6]), + (1720543986.94937, [], [1, 2, 3, 4, 5, 6]), + (1720570525.581032, [], [1, 2, 3, 4, 5, 6]), + (1720570749.619565, [], [1, 2, 3, 4, 5, 6]), + (1720585164.854344, [], [1, 2, 3, 4, 5, 6]), + (1720585249.748529, [], [1, 2, 3, 4, 5, 6]), + (1720585251.810485, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720489237.597076, [], [1, 2, 3, 4, 5, 6]), + (1720489298.656835, [], [1, 2, 3, 4, 5, 6]), + (1720489340.853931, [], [1, 2, 3, 4, 5, 6]), + (1720489414.715662, [], [1, 2, 3, 4, 5, 6]), + (1720489578.362748, [], [1, 2, 3, 4, 5, 6]), + (1720489722.080922, [], [1, 2, 3, 4, 5, 6]), + (1720489846.161597, [], [1, 2, 3, 4, 5, 6]), + (1720489902.616032, [], [1, 2, 3, 4, 5, 6]), + (1720489979.179271, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720226541.700259, [], [1, 2, 3, 4, 5, 6]), (1720459357.712795, [], [1, 2, 3, 4, 5, 6])], + [ + (1720137147.179349, [], [1, 2, 3, 4, 5, 6]), + (1720241160.346244, [], [1, 2, 3, 4, 5, 6]), + (1720393844.000636, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720233961.944441, [], [1, 2, 3, 4, 5, 6]), (1720475993.227865, [], [1, 2, 3, 4, 5, 6])], + [(1720339946.523179, [], [1, 2, 3, 4, 5, 6])], + [(1720314335.142585, [], [1, 2, 3, 4, 5, 6])], + [(1720307070.122528, [], [1, 2, 3, 4, 5, 6]), (1720396806.823553, [], [1, 2, 3, 4, 5, 6])], + [(1720134885.254524, [], [1, 2, 3, 4, 5, 6]), (1720582472.172677, [], [1, 2, 3, 4, 5, 6])], + [ + (1720064989.52196, [], [1, 2, 3, 4, 5, 6]), + (1720317471.54011, [], [1, 2, 3, 4, 5, 6]), + (1720405569.646675, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720230240.375249, [], [1, 2, 3, 4, 5, 6])], + [ + (1719981989.67667, [], [1, 2, 3, 4, 5, 6]), + (1719982209.356573, [], [1, 2, 3, 4, 5, 6]), + (1719982211.412871, [], [1, 2, 3, 4, 5, 6]), + (1719990729.511533, [], [1, 2, 3, 4, 5, 6]), + (1719990731.571772, [], [1, 2, 3, 4, 5, 6]), + (1720052018.486278, [], [1, 2, 3, 4, 5, 6]), + (1720052084.895512, [], [1, 2, 3, 4, 5, 6]), + (1720052086.959136, [], [1, 2, 3, 4, 5, 6]), + (1720063752.458627, [], [1, 2, 3, 4, 5, 6]), + (1720063775.402517, [], [1, 2, 3, 4, 5, 6]), + (1720131365.952047, [], [1, 2, 3, 4, 5, 6]), + (1720131512.217778, [], [1, 2, 3, 4, 5, 6]), + (1720131514.344434, [], [1, 2, 3, 4, 5, 6]), + (1720155954.588913, [], [1, 2, 3, 4, 5, 6]), + (1720156049.221823, [], [1, 2, 3, 4, 5, 6]), + (1720326057.45718, [], [1, 2, 3, 4, 5, 6]), + (1720404760.882693, [], [1, 2, 3, 4, 5, 6]), + (1720404806.729924, [], [1, 2, 3, 4, 5, 6]), + (1720404808.834418, [], [1, 2, 3, 4, 5, 6]), + (1720416517.018963, [], [1, 2, 3, 4, 5, 6]), + (1720494367.532053, [], [1, 2, 3, 4, 5, 6]), + (1720500247.551019, [], [1, 2, 3, 4, 5, 6]), + (1720500294.606063, [], [1, 2, 3, 4, 5, 6]), + (1720500296.694825, [], [1, 2, 3, 4, 5, 6]), + (1720500299.259697, [], [1, 2, 3, 4, 5, 6]), + (1720500301.365635, [], [1, 2, 3, 4, 5, 6]), + (1720572338.244531, [], [1, 2, 3, 4, 5, 6]), + (1720572428.794186, [], [1, 2, 3, 4, 5, 6]), + (1720572430.860491, [], [1, 2, 3, 4, 5, 6]), + (1720600877.354363, [], [1, 2, 3, 4, 5, 6]), + (1720601092.109844, [], [1, 2, 3, 4, 5, 6]), + (1720601094.164843, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720230142.987461, [], [1, 2, 3, 4, 5, 6])], + [(1720040834.068992, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984036.646786, [], [1, 2, 3, 4, 5, 6]), + (1720138775.775437, [], [1, 2, 3, 4, 5, 6]), + (1720337436.06649, [], [1, 2, 3, 4, 5, 6]), + (1720567415.54222, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720501680.278938, [], [1, 2, 3, 4, 5, 6]), (1720645969.459141, [], [1, 2, 3, 4, 5, 6])], + [(1719981148.135955, [], [1, 2, 3, 4, 5, 6]), 
(1720574648.013669, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982323.222591, [], [1, 2, 3, 4, 5, 6]), + (1720101646.380659, [], [1, 2, 3, 4, 5, 6]), + (1720493833.121559, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977634.84991, [], [1, 2, 3, 4, 5, 6])], + [(1720494761.805988, [], [1, 2, 3, 4, 5, 6]), (1720595943.849674, [], [1, 2, 3, 4, 5, 6])], + [ + (1720155432.624618, [], [1, 2, 3, 4, 5, 6]), + (1720500643.020756, [], [1, 2, 3, 4, 5, 6]), + (1720584683.624928, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720245035.196003, [], [1, 2, 3, 4, 5, 6])], + [(1720313673.855042, [], [1, 2, 3, 4, 5, 6]), (1720381149.495776, [], [1, 2, 3, 4, 5, 6])], + [(1720019520.657419, [], [1, 2, 3, 4, 5, 6])], + [(1719981920.434976, [], [1, 2, 3, 4, 5, 6]), (1720128572.036838, [], [1, 2, 3, 4, 5, 6])], + [(1720136202.220094, [], [1, 2, 3, 4, 5, 6]), (1720279940.922158, [], [1, 2, 3, 4, 5, 6])], + [(1720462395.987553, [], [1, 2, 3, 4, 5, 6])], + [ + (1720653383.244, [], [1, 2, 3, 4, 5, 6]), + (1720653383.325, [], [1, 2, 3, 4, 5, 6]), + (1720653391.627, [], [1, 2, 3, 4, 5, 6]), + (1720653392.102, [], [1, 2, 3, 4, 5, 6]), + (1720653392.298, [], [1, 2, 3, 4, 5, 6]), + (1720653394.934, [], [1, 2, 3, 4, 5, 6]), + (1720653396.411, [], [1, 2, 3, 4, 5, 6]), + (1720653433.093, [], [1, 2, 3, 4, 5, 6]), + (1720653433.236, [], [1, 2, 3, 4, 5, 6]), + (1720653434.991, [], [1, 2, 3, 4, 5, 6]), + (1720653435.037, [], [1, 2, 3, 4, 5, 6]), + (1720653501.654, [], [1, 2, 3, 4, 5, 6]), + (1720653501.71, [], [1, 2, 3, 4, 5, 6]), + (1720653504.799, [], [1, 2, 3, 4, 5, 6]), + (1720653506.446, [], [1, 2, 3, 4, 5, 6]), + (1720653507.872, [], [1, 2, 3, 4, 5, 6]), + (1720654003.023, [], [1, 2, 3, 4, 5, 6]), + (1720654003.148, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719977539.575178, [], [1, 2, 3, 4, 5, 6]), + (1720223726.13705, [], [1, 2, 3, 4, 5, 6]), + (1720396336.894644, [], [1, 2, 3, 4, 5, 6]), + (1720587683.68083, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720327049.710872, [], [1, 2, 3, 4, 5, 6])], + [(1720223003.678086, [], [1, 2, 3, 4, 5, 6]), (1720320656.874702, [], [1, 2, 3, 4, 5, 6])], + [(1720149475.628567, [], [1, 2, 3, 4, 5, 6]), (1720228859.277837, [], [1, 2, 3, 4, 5, 6])], + [(1720049864.230497, [], [1, 2, 3, 4, 5, 6])], + [(1720198432.201466, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975613.439717, [], [1, 2, 3, 4, 5, 6]), + (1719975648.049123, [], [1, 2, 3, 4, 5, 6]), + (1720056914.90013, [], [1, 2, 3, 4, 5, 6]), + (1720057026.542911, [], [1, 2, 3, 4, 5, 6]), + (1720061926.526142, [], [1, 2, 3, 4, 5, 6]), + (1720062007.614611, [], [1, 2, 3, 4, 5, 6]), + (1720147419.43368, [], [1, 2, 3, 4, 5, 6]), + (1720147421.485277, [], [1, 2, 3, 4, 5, 6]), + (1720234139.651394, [], [1, 2, 3, 4, 5, 6]), + (1720234141.758276, [], [1, 2, 3, 4, 5, 6]), + (1720315191.984726, [], [1, 2, 3, 4, 5, 6]), + (1720315194.093018, [], [1, 2, 3, 4, 5, 6]), + (1720315195.836394, [], [1, 2, 3, 4, 5, 6]), + (1720395738.54726, [], [1, 2, 3, 4, 5, 6]), + (1720395740.684533, [], [1, 2, 3, 4, 5, 6]), + (1720410342.218884, [], [1, 2, 3, 4, 5, 6]), + (1720410455.568303, [], [1, 2, 3, 4, 5, 6]), + (1720496479.412713, [], [1, 2, 3, 4, 5, 6]), + (1720496636.329168, [], [1, 2, 3, 4, 5, 6]), + (1720568810.362519, [], [1, 2, 3, 4, 5, 6]), + (1720569040.475975, [], [1, 2, 3, 4, 5, 6]), + (1720652557.884167, [], [1, 2, 3, 4, 5, 6]), + (1720652630.129755, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720233899.203213, [], [1, 2, 3, 4, 5, 6]), + (1720463796.00711, [], [1, 2, 3, 4, 5, 6]), + (1720567454.878169, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720218589.331391, [], [1, 2, 3, 4, 5, 6]), + 
(1720572246.733219, [], [1, 2, 3, 4, 5, 6]), + (1720585861.133309, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720030179.060188, [], [1, 2, 3, 4, 5, 6]), + (1720330759.17762, [], [1, 2, 3, 4, 5, 6]), + (1720494515.69797, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719979902.797865, [], [1, 2, 3, 4, 5, 6])], + [ + (1720070638.173492, [], [1, 2, 3, 4, 5, 6]), + (1720070819.083453, [], [1, 2, 3, 4, 5, 6]), + (1720070899.802295, [], [1, 2, 3, 4, 5, 6]), + (1720318683.767078, [], [1, 2, 3, 4, 5, 6]), + (1720318886.533145, [], [1, 2, 3, 4, 5, 6]), + (1720318966.57212, [], [1, 2, 3, 4, 5, 6]), + (1720318995.968059, [], [1, 2, 3, 4, 5, 6]), + (1720319139.50433, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720146380.443011, [], [1, 2, 3, 4, 5, 6]), (1720567703.854081, [], [1, 2, 3, 4, 5, 6])], + [(1720485224.936769, [], [1, 2, 3, 4, 5, 6])], + [(1720039180.636756, [], [1, 2, 3, 4, 5, 6]), (1720320703.675688, [], [1, 2, 3, 4, 5, 6])], + [(1720572877.111346, [], [1, 2, 3, 4, 5, 6])], + [(1720149009.624794, [], [1, 2, 3, 4, 5, 6])], + [ + (1720146066.037668, [], [1, 2, 3, 4, 5, 6]), + (1720233446.585623, [], [1, 2, 3, 4, 5, 6]), + (1720397647.223612, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720071535.98588, [], [1, 2, 3, 4, 5, 6]), + (1720222696.675857, [], [1, 2, 3, 4, 5, 6]), + (1720581710.534385, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720320564.297259, [], [1, 2, 3, 4, 5, 6])], + [(1720221596.179609, [], [1, 2, 3, 4, 5, 6]), (1720502714.197196, [], [1, 2, 3, 4, 5, 6])], + [(1720553799.408143, [], [1, 2, 3, 4, 5, 6])], + [ + (1720244362.654861, [], [1, 2, 3, 4, 5, 6]), + (1720412405.21556, [], [1, 2, 3, 4, 5, 6]), + (1720566429.648086, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720225993.003411, [], [1, 2, 3, 4, 5, 6]), (1720576860.143664, [], [1, 2, 3, 4, 5, 6])], + [(1720050680.82043, [], [1, 2, 3, 4, 5, 6])], + [(1719979231.190542, [], [1, 2, 3, 4, 5, 6]), (1720395944.084001, [], [1, 2, 3, 4, 5, 6])], + [(1720483644.896944, [], [1, 2, 3, 4, 5, 6])], + [ + (1720238837.512808, [], [1, 2, 3, 4, 5, 6]), + (1720400917.965225, [], [1, 2, 3, 4, 5, 6]), + (1720499924.896186, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978670.650293, [], [1, 2, 3, 4, 5, 6]), (1720312908.844749, [], [1, 2, 3, 4, 5, 6])], + [ + (1720064475.615309, [], [1, 2, 3, 4, 5, 6]), + (1720311005.200102, [], [1, 2, 3, 4, 5, 6]), + (1720398033.682041, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974842.261587, [], [1, 2, 3, 4, 5, 6]), + (1720070482.809945, [], [1, 2, 3, 4, 5, 6]), + (1720492054.306253, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719985346.86067, [], [1, 2, 3, 4, 5, 6])], + [(1720494586.311794, [], [1, 2, 3, 4, 5, 6])], + [ + (1720151598.2787, [], [1, 2, 3, 4, 5, 6]), + (1720243336.399964, [], [1, 2, 3, 4, 5, 6]), + (1720394460.006175, [], [1, 2, 3, 4, 5, 6]), + (1720584803.786632, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720044414.812182, [], [1, 2, 3, 4, 5, 6]), (1720161374.32603, [], [1, 2, 3, 4, 5, 6])], + [(1720242818.837344, [], [1, 2, 3, 4, 5, 6])], + [(1720407806.948096, [], [1, 2, 3, 4, 5, 6]), (1720511793.967125, [], [1, 2, 3, 4, 5, 6])], + [(1720177465.166679, [], [1, 2, 3, 4, 5, 6])], + [(1720152899.613121, [], [1, 2, 3, 4, 5, 6])], + [(1720144169.768087, [], [1, 2, 3, 4, 5, 6])], + [(1720050900.326256, [], [1, 2, 3, 4, 5, 6]), (1720592651.789908, [], [1, 2, 3, 4, 5, 6])], + [(1720136535.399876, [], [1, 2, 3, 4, 5, 6]), (1720345016.561725, [], [1, 2, 3, 4, 5, 6])], + [(1720148677.039505, [], [1, 2, 3, 4, 5, 6])], + [(1720103982.765975, [], [1, 2, 3, 4, 5, 6]), (1720223275.492349, [], [1, 2, 3, 4, 5, 6])], + [(1719966246.265247, [], [1, 2, 3, 4, 5, 6])], + 
[(1720048787.249996, [], [1, 2, 3, 4, 5, 6]), (1720588475.186395, [], [1, 2, 3, 4, 5, 6])], + [ + (1720406823.932911, [], [1, 2, 3, 4, 5, 6]), + (1720406854.864424, [], [1, 2, 3, 4, 5, 6]), + (1720406898.943281, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720106989.608336, [], [1, 2, 3, 4, 5, 6])], + [(1719965156.233365, [], [1, 2, 3, 4, 5, 6]), (1720156113.65034, [], [1, 2, 3, 4, 5, 6])], + [(1720237894.767081, [], [1, 2, 3, 4, 5, 6])], + [ + (1720236335.89358, [], [1, 2, 3, 4, 5, 6]), + (1720311377.453215, [], [1, 2, 3, 4, 5, 6]), + (1720406308.416613, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720580297.715893, [], [1, 2, 3, 4, 5, 6])], + [(1719983515.156201, [], [1, 2, 3, 4, 5, 6]), (1720243011.26844, [], [1, 2, 3, 4, 5, 6])], + [(1720412740.206646, [], [1, 2, 3, 4, 5, 6])], + [(1720573676.882026, [], [1, 2, 3, 4, 5, 6])], + [(1720069113.016836, [], [1, 2, 3, 4, 5, 6])], + [(1720065156.88711, [], [1, 2, 3, 4, 5, 6]), (1720342013.62189, [], [1, 2, 3, 4, 5, 6])], + [(1720414414.37316, [], [1, 2, 3, 4, 5, 6]), (1720576057.542994, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965980.977528, [], [1, 2, 3, 4, 5, 6]), + (1720328208.291947, [], [1, 2, 3, 4, 5, 6]), + (1720586256.843288, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719993285.557781, [], [1, 2, 3, 4, 5, 6])], + [ + (1720157474.360894, [], [1, 2, 3, 4, 5, 6]), + (1720317049.692797, [], [1, 2, 3, 4, 5, 6]), + (1720418157.354486, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720492206.117992, [], [1, 2, 3, 4, 5, 6])], + [(1720566094.344648, [], [1, 2, 3, 4, 5, 6])], + [(1719974058.930323, [], [1, 2, 3, 4, 5, 6]), (1720059173.893018, [], [1, 2, 3, 4, 5, 6])], + [(1720517061.661773, [], [1, 2, 3, 4, 5, 6])], + [(1720052300.009912, [], [1, 2, 3, 4, 5, 6]), (1720447926.535749, [], [1, 2, 3, 4, 5, 6])], + [(1720060153.321408, [], [1, 2, 3, 4, 5, 6]), (1720498576.79341, [], [1, 2, 3, 4, 5, 6])], + [(1720415193.154478, [], [1, 2, 3, 4, 5, 6]), (1720494529.74019, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980354.732889, [], [1, 2, 3, 4, 5, 6]), + (1719980356.852338, [], [1, 2, 3, 4, 5, 6]), + (1719980359.805901, [], [1, 2, 3, 4, 5, 6]), + (1719980361.898886, [], [1, 2, 3, 4, 5, 6]), + (1719980364.204398, [], [1, 2, 3, 4, 5, 6]), + (1720063622.210305, [], [1, 2, 3, 4, 5, 6]), + (1720063766.011158, [], [1, 2, 3, 4, 5, 6]), + (1720063768.060823, [], [1, 2, 3, 4, 5, 6]), + (1720134932.57792, [], [1, 2, 3, 4, 5, 6]), + (1720148426.91756, [], [1, 2, 3, 4, 5, 6]), + (1720148428.987966, [], [1, 2, 3, 4, 5, 6]), + (1720237634.85931, [], [1, 2, 3, 4, 5, 6]), + (1720237687.961173, [], [1, 2, 3, 4, 5, 6]), + (1720321596.679301, [], [1, 2, 3, 4, 5, 6]), + (1720394727.592533, [], [1, 2, 3, 4, 5, 6]), + (1720394743.278857, [], [1, 2, 3, 4, 5, 6]), + (1720408408.443408, [], [1, 2, 3, 4, 5, 6]), + (1720419213.527306, [], [1, 2, 3, 4, 5, 6]), + (1720419321.981, [], [1, 2, 3, 4, 5, 6]), + (1720419324.073269, [], [1, 2, 3, 4, 5, 6]), + (1720497577.385151, [], [1, 2, 3, 4, 5, 6]), + (1720497694.789568, [], [1, 2, 3, 4, 5, 6]), + (1720497696.883431, [], [1, 2, 3, 4, 5, 6]), + (1720584999.597212, [], [1, 2, 3, 4, 5, 6]), + (1720585001.687849, [], [1, 2, 3, 4, 5, 6]), + (1720585005.063862, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720134251.830614, [], [1, 2, 3, 4, 5, 6]), + (1720407054.526951, [], [1, 2, 3, 4, 5, 6]), + (1720543564.686466, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720395362.215692, [], [1, 2, 3, 4, 5, 6]), (1720500480.122778, [], [1, 2, 3, 4, 5, 6])], + [ + (1720058412.695383, [], [1, 2, 3, 4, 5, 6]), + (1720228775.865928, [], [1, 2, 3, 4, 5, 6]), + (1720503282.31697, [], [1, 2, 3, 4, 5, 6]), + ], + 
[(1720614350.980502, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982519.149294, [], [1, 2, 3, 4, 5, 6]), + (1719982589.903207, [], [1, 2, 3, 4, 5, 6]), + (1719982591.966235, [], [1, 2, 3, 4, 5, 6]), + (1719982594.824529, [], [1, 2, 3, 4, 5, 6]), + (1720056059.106374, [], [1, 2, 3, 4, 5, 6]), + (1720056099.353137, [], [1, 2, 3, 4, 5, 6]), + (1720056101.438857, [], [1, 2, 3, 4, 5, 6]), + (1720084141.137944, [], [1, 2, 3, 4, 5, 6]), + (1720084195.578773, [], [1, 2, 3, 4, 5, 6]), + (1720136869.202173, [], [1, 2, 3, 4, 5, 6]), + (1720223064.342828, [], [1, 2, 3, 4, 5, 6]), + (1720223120.591787, [], [1, 2, 3, 4, 5, 6]), + (1720223122.696149, [], [1, 2, 3, 4, 5, 6]), + (1720239556.237398, [], [1, 2, 3, 4, 5, 6]), + (1720239630.045363, [], [1, 2, 3, 4, 5, 6]), + (1720239632.137037, [], [1, 2, 3, 4, 5, 6]), + (1720312988.468776, [], [1, 2, 3, 4, 5, 6]), + (1720313161.594176, [], [1, 2, 3, 4, 5, 6]), + (1720313163.656358, [], [1, 2, 3, 4, 5, 6]), + (1720413652.862676, [], [1, 2, 3, 4, 5, 6]), + (1720413773.395596, [], [1, 2, 3, 4, 5, 6]), + (1720484458.010065, [], [1, 2, 3, 4, 5, 6]), + (1720484503.114542, [], [1, 2, 3, 4, 5, 6]), + (1720484505.173957, [], [1, 2, 3, 4, 5, 6]), + (1720570920.862746, [], [1, 2, 3, 4, 5, 6]), + (1720571065.994777, [], [1, 2, 3, 4, 5, 6]), + (1720571068.086575, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720138634.579801, [], [1, 2, 3, 4, 5, 6]), (1720394701.653755, [], [1, 2, 3, 4, 5, 6])], + [(1720404840.88735, [], [1, 2, 3, 4, 5, 6]), (1720570759.329975, [], [1, 2, 3, 4, 5, 6])], + [(1720474997.255842, [], [1, 2, 3, 4, 5, 6])], + [ + (1719964981.812038, [], [1, 2, 3, 4, 5, 6]), + (1719965079.656724, [], [1, 2, 3, 4, 5, 6]), + (1719965081.766625, [], [1, 2, 3, 4, 5, 6]), + (1720017945.346535, [], [1, 2, 3, 4, 5, 6]), + (1720018196.228851, [], [1, 2, 3, 4, 5, 6]), + (1720018198.332037, [], [1, 2, 3, 4, 5, 6]), + (1720071944.789981, [], [1, 2, 3, 4, 5, 6]), + (1720071989.860765, [], [1, 2, 3, 4, 5, 6]), + (1720071991.963241, [], [1, 2, 3, 4, 5, 6]), + (1720226601.357382, [], [1, 2, 3, 4, 5, 6]), + (1720226662.671017, [], [1, 2, 3, 4, 5, 6]), + (1720226664.720854, [], [1, 2, 3, 4, 5, 6]), + (1720226666.697991, [], [1, 2, 3, 4, 5, 6]), + (1720245432.525672, [], [1, 2, 3, 4, 5, 6]), + (1720245586.690365, [], [1, 2, 3, 4, 5, 6]), + (1720245588.811888, [], [1, 2, 3, 4, 5, 6]), + (1720313288.75101, [], [1, 2, 3, 4, 5, 6]), + (1720313438.935319, [], [1, 2, 3, 4, 5, 6]), + (1720313440.997298, [], [1, 2, 3, 4, 5, 6]), + (1720325185.461926, [], [1, 2, 3, 4, 5, 6]), + (1720325279.708469, [], [1, 2, 3, 4, 5, 6]), + (1720325281.823994, [], [1, 2, 3, 4, 5, 6]), + (1720325284.895173, [], [1, 2, 3, 4, 5, 6]), + (1720325286.963747, [], [1, 2, 3, 4, 5, 6]), + (1720351212.007507, [], [1, 2, 3, 4, 5, 6]), + (1720351417.722923, [], [1, 2, 3, 4, 5, 6]), + (1720351419.786979, [], [1, 2, 3, 4, 5, 6]), + (1720410234.644402, [], [1, 2, 3, 4, 5, 6]), + (1720410236.746729, [], [1, 2, 3, 4, 5, 6]), + (1720484087.598816, [], [1, 2, 3, 4, 5, 6]), + (1720484089.656452, [], [1, 2, 3, 4, 5, 6]), + (1720560975.588946, [], [1, 2, 3, 4, 5, 6]), + (1720561062.767708, [], [1, 2, 3, 4, 5, 6]), + (1720576170.001406, [], [1, 2, 3, 4, 5, 6]), + (1720576274.339938, [], [1, 2, 3, 4, 5, 6]), + (1720634969.318238, [], [1, 2, 3, 4, 5, 6]), + (1720634971.383262, [], [1, 2, 3, 4, 5, 6]), + (1720634973.669218, [], [1, 2, 3, 4, 5, 6]), + (1720634975.727614, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720047138.987663, [], [1, 2, 3, 4, 5, 6]), + (1720239116.860589, [], [1, 2, 3, 4, 5, 6]), + (1720567216.089602, [], [1, 2, 3, 4, 5, 6]), + ], + 
[(1720062114.160483, [], [1, 2, 3, 4, 5, 6])], + [(1719974901.32474, [], [1, 2, 3, 4, 5, 6]), (1720224712.94567, [], [1, 2, 3, 4, 5, 6])], + [ + (1719985511.407849, [], [1, 2, 3, 4, 5, 6]), + (1720140363.584567, [], [1, 2, 3, 4, 5, 6]), + (1720573348.34834, [], [1, 2, 3, 4, 5, 6]), + (1720649971.95392, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720325668.53657, [], [1, 2, 3, 4, 5, 6]), (1720407800.484026, [], [1, 2, 3, 4, 5, 6])], + [(1720313988.784401, [], [1, 2, 3, 4, 5, 6])], + [ + (1720137608.121513, [], [1, 2, 3, 4, 5, 6]), + (1720230219.916298, [], [1, 2, 3, 4, 5, 6]), + (1720576804.122481, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060264.40999, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982324.891431, [], [1, 2, 3, 4, 5, 6]), + (1719982326.95929, [], [1, 2, 3, 4, 5, 6]), + (1720072365.964045, [], [1, 2, 3, 4, 5, 6]), + (1720072368.013382, [], [1, 2, 3, 4, 5, 6]), + (1720185779.887725, [], [1, 2, 3, 4, 5, 6]), + (1720185782.013458, [], [1, 2, 3, 4, 5, 6]), + (1720230424.054008, [], [1, 2, 3, 4, 5, 6]), + (1720329463.631365, [], [1, 2, 3, 4, 5, 6]), + (1720329503.210461, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978020.337077, [], [1, 2, 3, 4, 5, 6]), (1720378278.012205, [], [1, 2, 3, 4, 5, 6])], + [ + (1720198700.302556, [], [1, 2, 3, 4, 5, 6]), + (1720417057.718199, [], [1, 2, 3, 4, 5, 6]), + (1720584860.786802, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720627126.675399, [], [1, 2, 3, 4, 5, 6])], + [(1720072022.286212, [], [1, 2, 3, 4, 5, 6]), (1720366423.980574, [], [1, 2, 3, 4, 5, 6])], + [(1720221042.039954, [], [1, 2, 3, 4, 5, 6])], + [ + (1720060635.731519, [], [1, 2, 3, 4, 5, 6]), + (1720210299.946067, [], [1, 2, 3, 4, 5, 6]), + (1720283154.070272, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720402469.930097, [], [1, 2, 3, 4, 5, 6])], + [ + (1719972611.185894, [], [1, 2, 3, 4, 5, 6]), + (1720227219.185837, [], [1, 2, 3, 4, 5, 6]), + (1720565623.051185, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060770.015072, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979906.872722, [], [1, 2, 3, 4, 5, 6]), + (1719980094.376717, [], [1, 2, 3, 4, 5, 6]), + (1719980096.476308, [], [1, 2, 3, 4, 5, 6]), + (1720067152.682142, [], [1, 2, 3, 4, 5, 6]), + (1720067174.598435, [], [1, 2, 3, 4, 5, 6]), + (1720137635.975558, [], [1, 2, 3, 4, 5, 6]), + (1720137733.593423, [], [1, 2, 3, 4, 5, 6]), + (1720224539.774939, [], [1, 2, 3, 4, 5, 6]), + (1720323664.982932, [], [1, 2, 3, 4, 5, 6]), + (1720400336.830381, [], [1, 2, 3, 4, 5, 6]), + (1720400497.747426, [], [1, 2, 3, 4, 5, 6]), + (1720400499.843107, [], [1, 2, 3, 4, 5, 6]), + (1720486404.88152, [], [1, 2, 3, 4, 5, 6]), + (1720486460.387837, [], [1, 2, 3, 4, 5, 6]), + (1720486462.465262, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720494960.521565, [], [1, 2, 3, 4, 5, 6])], + [(1720492283.522426, [], [1, 2, 3, 4, 5, 6])], + [(1720588131.39025, [], [1, 2, 3, 4, 5, 6])], + [(1719965172.184078, [], [1, 2, 3, 4, 5, 6])], + [(1720313653.224728, [], [1, 2, 3, 4, 5, 6])], + [(1720133961.331413, [], [1, 2, 3, 4, 5, 6])], + [(1719969914.979558, [], [1, 2, 3, 4, 5, 6])], + [ + (1720051155.959984, [], [1, 2, 3, 4, 5, 6]), + (1720318569.685111, [], [1, 2, 3, 4, 5, 6]), + (1720499729.951734, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720328273.411971, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982315.965122, [], [1, 2, 3, 4, 5, 6]), + (1720423276.150804, [], [1, 2, 3, 4, 5, 6]), + (1720586911.740203, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719968679.211527, [], [1, 2, 3, 4, 5, 6])], + [(1720063388.278848, [], [1, 2, 3, 4, 5, 6]), (1720416336.796001, [], [1, 2, 3, 4, 5, 6])], + [(1720398479.735494, [], [1, 2, 3, 4, 5, 6]), (1720493260.033312, 
[], [1, 2, 3, 4, 5, 6])], + [ + (1720489609.661573, [], [1, 2, 3, 4, 5, 6]), + (1720489700.750791, [], [1, 2, 3, 4, 5, 6]), + (1720489717.546997, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720500732.208908, [], [1, 2, 3, 4, 5, 6])], + [ + (1720153118.225066, [], [1, 2, 3, 4, 5, 6]), + (1720314031.634943, [], [1, 2, 3, 4, 5, 6]), + (1720590337.724401, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720070140.554847, [], [1, 2, 3, 4, 5, 6]), (1720137932.433554, [], [1, 2, 3, 4, 5, 6])], + [(1719992154.926275, [], [1, 2, 3, 4, 5, 6]), (1720320574.945082, [], [1, 2, 3, 4, 5, 6])], + [(1719984916.520671, [], [1, 2, 3, 4, 5, 6]), (1720569849.178614, [], [1, 2, 3, 4, 5, 6])], + [(1720140614.641046, [], [1, 2, 3, 4, 5, 6]), (1720395184.350061, [], [1, 2, 3, 4, 5, 6])], + [(1720310387.035179, [], [1, 2, 3, 4, 5, 6]), (1720473940.199193, [], [1, 2, 3, 4, 5, 6])], + [(1720062920.051834, [], [1, 2, 3, 4, 5, 6]), (1720226181.474055, [], [1, 2, 3, 4, 5, 6])], + [(1720470329.222623, [], [1, 2, 3, 4, 5, 6])], + [(1720582334.499662, [], [1, 2, 3, 4, 5, 6])], + [(1720443828.896214, [], [1, 2, 3, 4, 5, 6]), (1720580682.756419, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226425.344326, [], [1, 2, 3, 4, 5, 6]), + (1720310598.961662, [], [1, 2, 3, 4, 5, 6]), + (1720589761.631011, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720584698.862455, [], [1, 2, 3, 4, 5, 6])], + [(1720164879.185564, [], [1, 2, 3, 4, 5, 6]), (1720323846.480885, [], [1, 2, 3, 4, 5, 6])], + [(1720051096.071376, [], [1, 2, 3, 4, 5, 6]), (1720157299.452758, [], [1, 2, 3, 4, 5, 6])], + [ + (1720223524.412388, [], [1, 2, 3, 4, 5, 6]), + (1720326592.782923, [], [1, 2, 3, 4, 5, 6]), + (1720578100.065601, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720313647.455076, [], [1, 2, 3, 4, 5, 6]), (1720559337.211802, [], [1, 2, 3, 4, 5, 6])], + [ + (1719981335.449573, [], [1, 2, 3, 4, 5, 6]), + (1720067595.1521, [], [1, 2, 3, 4, 5, 6]), + (1720319132.823969, [], [1, 2, 3, 4, 5, 6]), + (1720491547.165147, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720019886.753884, [], [1, 2, 3, 4, 5, 6]), + (1720153101.556554, [], [1, 2, 3, 4, 5, 6]), + (1720313536.357232, [], [1, 2, 3, 4, 5, 6]), + (1720485395.202604, [], [1, 2, 3, 4, 5, 6]), + (1720568839.562655, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720310841.194378, [], [1, 2, 3, 4, 5, 6]), + (1720310912.574061, [], [1, 2, 3, 4, 5, 6]), + (1720310914.655803, [], [1, 2, 3, 4, 5, 6]), + (1720587828.804404, [], [1, 2, 3, 4, 5, 6]), + (1720588071.078858, [], [1, 2, 3, 4, 5, 6]), + (1720588073.115074, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720055953.618503, [], [1, 2, 3, 4, 5, 6]), + (1720223652.080905, [], [1, 2, 3, 4, 5, 6]), + (1720308372.703732, [], [1, 2, 3, 4, 5, 6]), + (1720624033.359415, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720173756.125859, [], [1, 2, 3, 4, 5, 6]), + (1720315128.683231, [], [1, 2, 3, 4, 5, 6]), + (1720466410.646777, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720157263.810637, [], [1, 2, 3, 4, 5, 6]), (1720235081.49838, [], [1, 2, 3, 4, 5, 6])], + [ + (1720229216.005254, [], [1, 2, 3, 4, 5, 6]), + (1720557735.625871, [], [1, 2, 3, 4, 5, 6]), + (1720627294.521232, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720394336.326148, [], [1, 2, 3, 4, 5, 6]), + (1720394382.775033, [], [1, 2, 3, 4, 5, 6]), + (1720394404.054186, [], [1, 2, 3, 4, 5, 6]), + (1720394488.655765, [], [1, 2, 3, 4, 5, 6]), + (1720394583.815862, [], [1, 2, 3, 4, 5, 6]), + (1720394609.744123, [], [1, 2, 3, 4, 5, 6]), + (1720394643.351845, [], [1, 2, 3, 4, 5, 6]), + (1720394661.447752, [], [1, 2, 3, 4, 5, 6]), + (1720394715.354794, [], [1, 2, 3, 4, 5, 6]), + ], + ] + for b in a: + 
calculate_funnel_from_user_events(6, 1123200, "first_touch", "ordered", [[]], b) diff --git a/posthog/user_scripts/aggregate_funnel.py b/posthog/user_scripts/aggregate_funnel.py new file mode 100755 index 0000000000000..162918a819625 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel.py @@ -0,0 +1,144 @@ +#!/usr/bin/python3 +import json +import sys +from dataclasses import dataclass, replace +from itertools import groupby, permutations +from typing import Any, cast +from collections.abc import Sequence + + +def parse_args(line): + args = json.loads(line) + return [ + int(args["num_steps"]), + int(args["conversion_window_limit"]), + str(args["breakdown_attribution_type"]), + str(args["funnel_order_type"]), + args["prop_vals"], # Array(Array(String)) + args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + ] + + +@dataclass(frozen=True) +class EnteredTimestamp: + timestamp: Any + timings: Any + + +# each event can match multiple steps here +# all that matters is when they entered the funnel - you can propagate the time from the previous step when you update +# This function is defined for Clickhouse in user_defined_functions.xml along with types +# num_steps is the total number of steps in the funnel +# conversion_window_limit is in seconds +# events is an array of tuples of (timestamp, breakdown, [steps]) +# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. +# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. +def calculate_funnel_from_user_events( + num_steps: int, + conversion_window_limit_seconds: int, + breakdown_attribution_type: str, + funnel_order_type: str, + prop_vals: list[Any], + events: Sequence[tuple[float, list[str] | int | str, list[int]]], +): + default_entered_timestamp = EnteredTimestamp(0, []) + max_step = [0, default_entered_timestamp] + # If the attribution mode is a breakdown step, set this to the integer that represents that step + breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None + + # This function returns an Array. We build up a list of result tuples here and print them as JSON at the end. + results: list[tuple[int, Any, list[float]]] = [] + + # Process an event. If this hits an exclusion, return False, else return True.
+ def process_event(timestamp, breakdown, steps, *, entered_timestamp, prop_val) -> bool: + # iterate the steps in reverse so we don't count this event multiple times + for step in reversed(steps): + exclusion = False + if step < 0: + exclusion = True + step = -step + + in_match_window = timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds + already_reached_this_step_with_same_entered_timestamp = ( + entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp + ) + + if in_match_window and not already_reached_this_step_with_same_entered_timestamp: + if exclusion: + results.append((-1, prop_val, [])) + return False + is_unmatched_step_attribution = ( + breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown + ) + if not is_unmatched_step_attribution: + entered_timestamp[step] = replace( + entered_timestamp[step - 1], timings=[*entered_timestamp[step - 1].timings, timestamp] + ) + if step > max_step[0]: + max_step[:] = (step, entered_timestamp[step]) + + if funnel_order_type == "strict": + for i in range(len(entered_timestamp)): + if i not in steps: + entered_timestamp[i] = default_entered_timestamp + + return True + + # We call this for each possible breakdown value. + def loop_prop_val(prop_val): + # an array of when the user entered the funnel + # entered_timestamp = [(0, "", [])] * (num_steps + 1) + max_step[:] = [0, default_entered_timestamp] + entered_timestamp: list[EnteredTimestamp] = [default_entered_timestamp] * (num_steps + 1) + + def add_max_step(): + i = cast(int, max_step[0]) + final = cast(EnteredTimestamp, max_step[1]) + results.append((i - 1, prop_val, [final.timings[i] - final.timings[i - 1] for i in range(1, i)])) + + filtered_events = ( + ((timestamp, breakdown, steps) for (timestamp, breakdown, steps) in events if breakdown == prop_val) + if breakdown_attribution_type == "all_events" + else events + ) + for timestamp, events_with_same_timestamp_iterator in groupby(filtered_events, key=lambda x: x[0]): + events_with_same_timestamp = tuple(events_with_same_timestamp_iterator) + entered_timestamp[0] = EnteredTimestamp(timestamp, []) + if len(events_with_same_timestamp) == 1: + if not process_event( + *events_with_same_timestamp[0], entered_timestamp=entered_timestamp, prop_val=prop_val + ): + return + else: + # This is a special case for events with the same timestamp + # We play all of their permutations and most generously take the ones that advanced the furthest + # This has quite bad performance, and can probably be optimized through clever but annoying logic + # but shouldn't be hit too often + entered_timestamps = [] + for events_group_perm in permutations(events_with_same_timestamp): + entered_timestamps.append(list(entered_timestamp)) + for event in events_group_perm: + if not process_event(*event, entered_timestamp=entered_timestamps[-1], prop_val=prop_val): + # If any of the permutations hits an exclusion, we exclude this user. + # This isn't an important implementation detail and we could do something smarter here. 
+ return + for i in range(len(entered_timestamp)): + entered_timestamp[i] = max((x[i] for x in entered_timestamps), key=lambda x: x.timestamp) + + # If we have hit the goal, we can terminate early + if entered_timestamp[num_steps].timestamp > 0: + add_max_step() + return + + # Find the furthest step we have made it to and print it + add_max_step() + return + + [loop_prop_val(prop_val) for prop_val in prop_vals] + print(json.dumps({"result": results}), end="\n") # noqa: T201 + + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_array.py b/posthog/user_scripts/aggregate_funnel_array.py new file mode 100755 index 0000000000000..17b053bb7d448 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_array.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel import parse_args, calculate_funnel_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_array_trends.py b/posthog/user_scripts/aggregate_funnel_array_trends.py new file mode 100755 index 0000000000000..15e93f5452797 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_array_trends.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_trends_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_array_trends_test.py b/posthog/user_scripts/aggregate_funnel_array_trends_test.py new file mode 100755 index 0000000000000..44d3cc9b8f059 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_array_trends_test.py @@ -0,0 +1,13 @@ +#!/usr/bin/python3 + +from aggregate_funnel_trends import calculate_funnel_trends_from_user_events, parse_args +import sys +import json + +if __name__ == "__main__": + for line in sys.stdin: + try: + calculate_funnel_trends_from_user_events(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_cohort.py b/posthog/user_scripts/aggregate_funnel_cohort.py new file mode 100755 index 0000000000000..17b053bb7d448 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_cohort.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel import parse_args, calculate_funnel_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_cohort_trends.py b/posthog/user_scripts/aggregate_funnel_cohort_trends.py new file mode 100755 index 0000000000000..15e93f5452797 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_cohort_trends.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_trends_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_test.py b/posthog/user_scripts/aggregate_funnel_test.py new file mode 100755 index 0000000000000..e0689b82af21c --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_test.py 
@@ -0,0 +1,13 @@ +#!/usr/bin/python3 +import json + +from aggregate_funnel import calculate_funnel_from_user_events, parse_args +import sys + +if __name__ == "__main__": + for line in sys.stdin: + try: + calculate_funnel_from_user_events(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_trends.py b/posthog/user_scripts/aggregate_funnel_trends.py new file mode 100755 index 0000000000000..0aa96b7a19b96 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_trends.py @@ -0,0 +1,131 @@ +#!/usr/bin/python3 +import sys +from dataclasses import dataclass, replace +from typing import Any +from collections.abc import Sequence +import json + + +def parse_args(line): + args = json.loads(line) + return [ + int(args["from_step"]), + int(args["num_steps"]), + int(args["conversion_window_limit"]), + str(args["breakdown_attribution_type"]), + str(args["funnel_order_type"]), + args["prop_vals"], # Array(Array(String)) + args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + ] + + +@dataclass(frozen=True) +class EnteredTimestamp: + timestamp: Any + timings: Any + + +# each event can match multiple steps here +# all that matters is when they entered the funnel - you can propagate the time from the previous step when you update +# This function is defined for Clickhouse in user_defined_functions.xml along with types +# num_steps is the total number of steps in the funnel +# conversion_window_limit is in seconds +# events is an array of tuples of (timestamp, interval_start, breakdown, [steps]) +# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. +# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. +def calculate_funnel_trends_from_user_events( + from_step: int, + num_steps: int, + conversion_window_limit_seconds: int, + breakdown_attribution_type: str, + funnel_order_type: str, + prop_vals: list[Any], + events: Sequence[tuple[float, int, list[str] | int | str, list[int]]], +): + default_entered_timestamp = EnteredTimestamp(0, []) + # If the attribution mode is a breakdown step, set this to the integer that represents that step + breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None + + # Results is a map of start intervals to success or failure. If an interval isn't here, it means the + # user didn't enter the funnel in that interval + results = {} + + # We call this for each possible breakdown value. + def loop_prop_val(prop_val): + # we need to track every distinct entry into the funnel through to the end + filtered_events = ( + ( + (timestamp, interval_start, breakdown, steps) + for (timestamp, interval_start, breakdown, steps) in events + if breakdown == prop_val + ) + if breakdown_attribution_type == "all_events" + else events + ) + list_of_entered_timestamps = [] + + for timestamp, interval_start, breakdown, steps in filtered_events: + for step in reversed(steps): + exclusion = False + if step < 0: + exclusion = True + step = -step + # Special code to handle the first step + # Potential Optimization: we could skip tracking here if the user has already completed the funnel for this interval + if step == 1: + entered_timestamp = [default_entered_timestamp] * (num_steps + 1) + # Store the interval start at index 0, which is what we want to return if this works.
+ # For strict funnels, we need to track if the "from_step" has been hit + # Abuse the timings field on the 0th index entered_timestamp to have the elt True if we have hit from_step + entered_timestamp[0] = EnteredTimestamp(interval_start, [True] if from_step == 0 else []) + entered_timestamp[1] = EnteredTimestamp(timestamp, [timestamp]) + list_of_entered_timestamps.append(entered_timestamp) + else: + for entered_timestamp in list_of_entered_timestamps[:]: + in_match_window = ( + timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds + ) + already_reached_this_step_with_same_entered_timestamp = ( + entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp + ) + if in_match_window and not already_reached_this_step_with_same_entered_timestamp: + if exclusion: + # this is a complete failure, exclude this person, don't print anything, don't count + return False + is_unmatched_step_attribution = ( + breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown + ) + if not is_unmatched_step_attribution: + entered_timestamp[step] = replace( + entered_timestamp[step - 1], + timings=[*entered_timestamp[step - 1].timings, timestamp], + ) + # check if we have hit the goal. if we have, remove it from the list and record it as a success in results + if entered_timestamp[num_steps].timestamp > 0: + results[entered_timestamp[0].timestamp] = (1, prop_val) + list_of_entered_timestamps.remove(entered_timestamp) + # If we have hit the from_step threshold, record it (abuse the timings field) + elif step == from_step + 1: + entered_timestamp[0].timings.append(True) + + # At the end of the event, clear all steps that weren't done by that event + if funnel_order_type == "strict": + for entered_timestamp in list_of_entered_timestamps[:]: + for i in range(1, len(entered_timestamp)): + if i not in steps: + entered_timestamp[i] = default_entered_timestamp + + # At this point, everything left in list_of_entered_timestamps is a failure, provided it made it to from_step + for entered_timestamp in list_of_entered_timestamps: + if entered_timestamp[0].timestamp not in results and len(entered_timestamp[0].timings) > 0: + results[entered_timestamp[0].timestamp] = (-1, prop_val) + + [loop_prop_val(prop_val) for prop_val in prop_vals] + result = [(interval_start, success_bool, prop_val) for interval_start, (success_bool, prop_val) in results.items()] + print(json.dumps({"result": result}), end="\n") # noqa: T201 + + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_trends_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/utils.py b/posthog/utils.py index 39bf6d606982f..aaf02658b42d1 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -58,6 +58,7 @@ from posthog.models import Team, User DATERANGE_MAP = { + "second": datetime.timedelta(seconds=1), "minute": datetime.timedelta(minutes=1), "hour": datetime.timedelta(hours=1), "day": datetime.timedelta(days=1), diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 2e3f66de9c630..24439fcecdc19 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -24,6 +24,7 @@ from posthog.hogql.database.database import create_hogql_database from posthog.temporal.data_imports.pipelines.stripe import validate_credentials as validate_stripe_credentials from posthog.temporal.data_imports.pipelines.zendesk import validate_credentials as validate_zendesk_credentials +from 
posthog.temporal.data_imports.pipelines.vitally import validate_credentials as validate_vitally_credentials from posthog.temporal.data_imports.pipelines.schemas import ( PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING, PIPELINE_TYPE_INCREMENTAL_FIELDS_MAPPING, @@ -280,6 +281,8 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response: new_source_model = self._handle_zendesk_source(request, *args, **kwargs) elif source_type == ExternalDataSource.Type.SALESFORCE: new_source_model = self._handle_salesforce_source(request, *args, **kwargs) + elif source_type == ExternalDataSource.Type.VITALLY: + new_source_model = self._handle_vitally_source(request, *args, **kwargs) elif source_type in [ ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL, @@ -395,6 +398,28 @@ def _handle_stripe_source(self, request: Request, *args: Any, **kwargs: Any) -> return new_source_model + def _handle_vitally_source(self, request: Request, *args: Any, **kwargs: Any) -> ExternalDataSource: + payload = request.data["payload"] + secret_token = payload.get("secret_token") + region = payload.get("region") + subdomain = payload.get("subdomain", None) + prefix = request.data.get("prefix", None) + source_type = request.data["source_type"] + + # TODO: remove dummy vars + new_source_model = ExternalDataSource.objects.create( + source_id=str(uuid.uuid4()), + connection_id=str(uuid.uuid4()), + destination_id=str(uuid.uuid4()), + team=self.team, + status="Running", + source_type=source_type, + job_inputs={"secret_token": secret_token, "region": region, "subdomain": subdomain}, + prefix=prefix, + ) + + return new_source_model + def _handle_zendesk_source(self, request: Request, *args: Any, **kwargs: Any) -> ExternalDataSource: payload = request.data["payload"] api_key = payload.get("api_key") @@ -690,6 +715,15 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any): status=status.HTTP_400_BAD_REQUEST, data={"message": "Invalid credentials: Zendesk credentials are incorrect"}, ) + elif source_type == ExternalDataSource.Type.VITALLY: + secret_token = request.data.get("secret_token", "") + region = request.data.get("region", "") + subdomain = request.data.get("subdomain", "") + if not validate_vitally_credentials(subdomain=subdomain, secret_token=secret_token, region=region): + return Response( + status=status.HTTP_400_BAD_REQUEST, + data={"message": "Invalid credentials: Vitally credentials are incorrect"}, + ) # Get schemas and validate SQL credentials if source_type in [ diff --git a/posthog/warehouse/api/saved_query.py b/posthog/warehouse/api/saved_query.py index 0c233001af365..3e81117190bbd 100644 --- a/posthog/warehouse/api/saved_query.py +++ b/posthog/warehouse/api/saved_query.py @@ -15,6 +15,7 @@ from posthog.hogql.parser import parse_select from posthog.hogql.printer import print_ast from posthog.warehouse.models import DataWarehouseJoin, DataWarehouseModelPath, DataWarehouseSavedQuery +import uuid logger = structlog.get_logger(__name__) @@ -171,14 +172,14 @@ def ancestors(self, request: request.Request, *args, **kwargs) -> response.Respo if not paths: return response.Response({"ancestors": []}) - ancestors: set[str] = set() + ancestors: set[str | uuid.UUID] = set() for model_path in paths: if up_to_level is None: start = 0 else: start = (int(up_to_level) * -1) - 1 - ancestors = ancestors.union(model_path.path[start:-1]) + ancestors = ancestors.union(map(try_convert_to_uuid, model_path.path[start:-1])) return response.Response({"ancestors": ancestors}) @@ -201,7 +202,7 @@ def 
descendants(self, request: request.Request, *args, **kwargs) -> response.Res if not paths: return response.Response({"descendants": []}) - descendants: set[str] = set() + descendants: set[str | uuid.UUID] = set() for model_path in paths: start = model_path.path.index(saved_query_id) + 1 if up_to_level is None: @@ -209,6 +210,13 @@ def descendants(self, request: request.Request, *args, **kwargs) -> response.Res else: end = start + up_to_level - descendants = descendants.union(model_path.path[start:end]) + descendants = descendants.union(map(try_convert_to_uuid, model_path.path[start:end])) return response.Response({"descendants": descendants}) + + +def try_convert_to_uuid(s: str) -> uuid.UUID | str: + try: + return str(uuid.UUID(s)) + except ValueError: + return s diff --git a/posthog/warehouse/api/test/test_saved_query.py b/posthog/warehouse/api/test/test_saved_query.py index a0abdf02c5e98..e739a1d0b5b97 100644 --- a/posthog/warehouse/api/test/test_saved_query.py +++ b/posthog/warehouse/api/test/test_saved_query.py @@ -231,7 +231,7 @@ def test_ancestors(self): self.assertEqual(response.status_code, 200, response.content) child_ancestors = response.json()["ancestors"] child_ancestors.sort() - self.assertEqual(child_ancestors, sorted([uuid.UUID(saved_query_parent_id).hex, "events", "persons"])) + self.assertEqual(child_ancestors, sorted([saved_query_parent_id, "events", "persons"])) response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 1} @@ -240,7 +240,7 @@ def test_ancestors(self): self.assertEqual(response.status_code, 200, response.content) child_ancestors_level_1 = response.json()["ancestors"] child_ancestors_level_1.sort() - self.assertEqual(child_ancestors_level_1, [uuid.UUID(saved_query_parent_id).hex]) + self.assertEqual(child_ancestors_level_1, [saved_query_parent_id]) response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 2} @@ -248,7 +248,7 @@ def test_ancestors(self): self.assertEqual(response.status_code, 200, response.content) child_ancestors_level_2 = response.json()["ancestors"] child_ancestors_level_2.sort() - self.assertEqual(child_ancestors_level_2, sorted([uuid.UUID(saved_query_parent_id).hex, "events", "persons"])) + self.assertEqual(child_ancestors_level_2, sorted([saved_query_parent_id, "events", "persons"])) response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 10} @@ -256,7 +256,7 @@ def test_ancestors(self): self.assertEqual(response.status_code, 200, response.content) child_ancestors_level_10 = response.json()["ancestors"] child_ancestors_level_10.sort() - self.assertEqual(child_ancestors_level_2, sorted([uuid.UUID(saved_query_parent_id).hex, "events", "persons"])) + self.assertEqual(child_ancestors_level_10, sorted([saved_query_parent_id, "events", "persons"])) def test_descendants(self): query = """\ @@ -316,7 +316,7 @@ def test_descendants(self): parent_descendants = response.json()["descendants"] self.assertEqual( sorted(parent_descendants), - sorted([uuid.UUID(saved_query_child_id).hex, uuid.UUID(saved_query_grand_child_id).hex]), + sorted([saved_query_child_id, saved_query_grand_child_id]), ) response = self.client.post( @@ -327,7 +327,7 @@ def test_descendants(self): parent_descendants_level_1 = response.json()["descendants"] self.assertEqual( parent_descendants_level_1, - [uuid.UUID(saved_query_child_id).hex], + [saved_query_child_id], 
) response = self.client.post( @@ -338,7 +338,7 @@ def test_descendants(self): parent_descendants_level_2 = response.json()["descendants"] self.assertEqual( sorted(parent_descendants_level_2), - sorted([uuid.UUID(saved_query_child_id).hex, uuid.UUID(saved_query_grand_child_id).hex]), + sorted([saved_query_child_id, saved_query_grand_child_id]), ) response = self.client.post( @@ -347,7 +347,7 @@ def test_descendants(self): self.assertEqual(response.status_code, 200, response.content) child_ancestors = response.json()["descendants"] - self.assertEqual(child_ancestors, [uuid.UUID(saved_query_grand_child_id).hex]) + self.assertEqual(child_ancestors, [saved_query_grand_child_id]) response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_grand_child_id}/descendants", diff --git a/posthog/warehouse/models/datawarehouse_saved_query.py b/posthog/warehouse/models/datawarehouse_saved_query.py index e3cccac60488b..ae24533754d96 100644 --- a/posthog/warehouse/models/datawarehouse_saved_query.py +++ b/posthog/warehouse/models/datawarehouse_saved_query.py @@ -150,6 +150,7 @@ def hogql_definition(self, modifiers: Optional[HogQLQueryModifiers] = None) -> S fields[column] = hogql_type(name=column) return SavedQuery( + id=str(self.id), name=self.name, query=self.query["query"], fields=fields, diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index 83f16eaa9aa1f..a3ba7730aaaa3 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -90,7 +90,9 @@ def aget_schema_if_exists(schema_name: str, team_id: int, source_id: uuid.UUID) @database_sync_to_async def aget_schema_by_id(schema_id: str, team_id: int) -> ExternalDataSchema | None: - return ExternalDataSchema.objects.prefetch_related("source").get(id=schema_id, team_id=team_id) + return ( + ExternalDataSchema.objects.prefetch_related("source").exclude(deleted=True).get(id=schema_id, team_id=team_id) + ) @database_sync_to_async diff --git a/posthog/warehouse/models/external_data_source.py b/posthog/warehouse/models/external_data_source.py index 6f9fe14e01dd9..14dd7c99dd88c 100644 --- a/posthog/warehouse/models/external_data_source.py +++ b/posthog/warehouse/models/external_data_source.py @@ -23,6 +23,7 @@ class Type(models.TextChoices): SALESFORCE = "Salesforce", "Salesforce" MYSQL = "MySQL", "MySQL" MSSQL = "MSSQL", "MSSQL" + VITALLY = "Vitally", "Vitally" class Status(models.TextChoices): RUNNING = "Running", "Running" diff --git a/requirements.in b/requirements.in index 17c4feb2f808d..2a2a7c6e3b9b9 100644 --- a/requirements.in +++ b/requirements.in @@ -59,6 +59,7 @@ paramiko==3.4.0 Pillow==10.2.0 pdpyras==5.2.0 posthoganalytics==3.5.0 +psutil==6.0.0 psycopg2-binary==2.9.7 pymssql==2.3.0 PyMySQL==1.1.1 @@ -90,7 +91,7 @@ sshtunnel==0.4.0 statshog==1.0.6 structlog==23.2.0 sqlparse==0.4.4 -temporalio==1.6.0 +temporalio==1.7.0 token-bucket==0.3.0 toronado==0.1.0 webdriver_manager==4.0.1 diff --git a/requirements.txt b/requirements.txt index c8d3e50b4256c..0289e2e505d1b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -448,6 +448,8 @@ protobuf==4.22.1 # grpcio-status # proto-plus # temporalio +psutil==6.0.0 + # via -r requirements.in psycopg==3.1.20 # via -r requirements.in psycopg-binary==3.1.20 @@ -665,7 +667,7 @@ structlog==23.2.0 # via # -r requirements.in # django-structlog -temporalio==1.6.0 +temporalio==1.7.0 # via -r requirements.in tenacity==8.2.3 # via diff --git a/rust/Cargo.lock 
b/rust/Cargo.lock index 8b577220c3ae0..117d4bae8a717 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -620,6 +620,7 @@ dependencies = [ "axum-test-helper", "base64 0.22.0", "bytes", + "common-alloc", "envconfig", "flate2", "futures", @@ -639,7 +640,6 @@ dependencies = [ "serde_json", "serde_urlencoded", "thiserror", - "tikv-jemallocator", "time", "tokio", "tower", @@ -703,6 +703,13 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "common-alloc" +version = "0.1.0" +dependencies = [ + "tikv-jemallocator", +] + [[package]] name = "common-dns" version = "0.1.0" @@ -878,6 +885,7 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-alloc", "common-dns", "common-kafka", "common-metrics", @@ -907,6 +915,7 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-alloc", "common-kafka", "common-metrics", "cyclotron-core", @@ -1157,6 +1166,7 @@ dependencies = [ "axum 0.7.5", "axum-client-ip", "bytes", + "common-alloc", "envconfig", "maxminddb", "once_cell", @@ -1587,6 +1597,7 @@ name = "hook-api" version = "0.1.0" dependencies = [ "axum 0.7.5", + "common-alloc", "common-metrics", "envconfig", "eyre", @@ -1628,6 +1639,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum 0.7.5", + "common-alloc", "common-kafka", "common-metrics", "envconfig", @@ -1653,6 +1665,7 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-alloc", "common-dns", "common-kafka", "common-metrics", @@ -2848,6 +2861,7 @@ dependencies = [ "ahash", "axum 0.7.5", "chrono", + "common-alloc", "common-metrics", "envconfig", "futures", diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 39fbcb8c48449..5c30dd1a8cf46 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -7,6 +7,7 @@ members = [ "common/health", "common/metrics", "common/dns", + "common/alloc", "feature-flags", "hook-api", "hook-common", diff --git a/rust/bin/migrate-cyclotron b/rust/bin/migrate-cyclotron new file mode 100755 index 0000000000000..cde8d8b4d65fc --- /dev/null +++ b/rust/bin/migrate-cyclotron @@ -0,0 +1,10 @@ +#!/bin/sh +SCRIPT_DIR=$(dirname "$(readlink -f "$0")") + +CYCLOTRON_DATABASE_NAME=${CYCLOTRON_DATABASE_NAME:-cyclotron} +CYCLOTRON_DATABASE_URL=${CYCLOTRON_DATABASE_URL:-postgres://posthog:posthog@localhost:5432/$CYCLOTRON_DATABASE_NAME} + +echo "Performing cyclotron migrations for $CYCLOTRON_DATABASE_URL (DATABASE_NAME=$CYCLOTRON_DATABASE_NAME)" + +sqlx database create -D "$CYCLOTRON_DATABASE_URL" +sqlx migrate run -D "$CYCLOTRON_DATABASE_URL" --source $SCRIPT_DIR/../cyclotron-core/migrations diff --git a/rust/capture/Cargo.toml b/rust/capture/Cargo.toml index cc551cdac99a1..7b50fe760b742 100644 --- a/rust/capture/Cargo.toml +++ b/rust/capture/Cargo.toml @@ -7,10 +7,6 @@ edition = "2021" workspace = true [dependencies] - -[target.'cfg(not(target_env = "msvc"))'.dependencies] -tikv-jemallocator = "0.6" - anyhow = { workspace = true } async-trait = { workspace = true } axum = { workspace = true } @@ -21,6 +17,7 @@ envconfig = { workspace = true } flate2 = { workspace = true } governor = { workspace = true } health = { path = "../common/health" } +common-alloc = { path = "../common/alloc" } metrics = { workspace = true } metrics-exporter-prometheus = { workspace = true } opentelemetry = { workspace = true } diff --git a/rust/capture/src/limiters/redis.rs b/rust/capture/src/limiters/redis.rs index 132f09df77513..cc7e7d119d89b 100644 --- a/rust/capture/src/limiters/redis.rs +++ b/rust/capture/src/limiters/redis.rs @@ -1,5 +1,11 @@ use metrics::gauge; -use std::{collections::HashSet, 
ops::Sub, sync::Arc}; +use std::time::Duration as StdDuration; +use std::{collections::HashSet, sync::Arc}; +use time::{Duration, OffsetDateTime}; +use tokio::sync::RwLock; +use tokio::task; +use tokio::time::interval; +use tracing::instrument; use crate::redis::Client; @@ -17,18 +23,12 @@ use crate::redis::Client; /// 2. Capture should cope with redis being _totally down_, and fail open /// 3. We should not hit redis for every single request /// -/// The solution here is to read from the cache until a time interval is hit, and then fetch new -/// data. The write requires taking a lock that stalls all readers, though so long as redis reads -/// stay fast we're ok. +/// The solution here is to read from the cache and update the set in a background task. +/// Updating the set still takes a write lock that blocks all readers, but we only take +/// that lock once we already have the response from redis, so it is held very briefly. /// /// Some small delay between an account being limited and the limit taking effect is acceptable. /// However, ideally we should not allow requests from some pods but 429 from others. -use thiserror::Error; -use time::{Duration, OffsetDateTime}; -use tokio::sync::RwLock; -use tracing::instrument; - -// todo: fetch from env const QUOTA_LIMITER_CACHE_KEY: &str = "@posthog/quota-limits/"; #[derive(Debug)] @@ -46,19 +46,12 @@ impl QuotaResource { } } -#[derive(Error, Debug)] -pub enum LimiterError { - #[error("updater already running - there can only be one")] - UpdaterRunning, -} - #[derive(Clone)] pub struct RedisLimiter { limited: Arc<RwLock<HashSet<String>>>, redis: Arc<dyn Client + Send + Sync>, - redis_key_prefix: String, + key: String, interval: Duration, - updated: Arc<RwLock<OffsetDateTime>>, } impl RedisLimiter { @@ -74,98 +67,67 @@ impl RedisLimiter { interval: Duration, redis: Arc<dyn Client + Send + Sync>, redis_key_prefix: Option<String>, + resource: QuotaResource, ) -> anyhow::Result<RedisLimiter> { let limited = Arc::new(RwLock::new(HashSet::new())); + let key_prefix = redis_key_prefix.unwrap_or_default(); - // Force an update immediately if we have any reasonable interval - let updated = OffsetDateTime::from_unix_timestamp(0)?; - let updated = Arc::new(RwLock::new(updated)); - - Ok(RedisLimiter { + let limiter = RedisLimiter { interval, limited, - updated, - redis, - redis_key_prefix: redis_key_prefix.unwrap_or_default(), - }) + redis: redis.clone(), + key: format!("{key_prefix}{QUOTA_LIMITER_CACHE_KEY}{}", resource.as_str()), + }; + + // Spawn a background task to periodically fetch data from Redis + limiter.spawn_background_update(); + + Ok(limiter) + } + + fn spawn_background_update(&self) { + let limited = Arc::clone(&self.limited); + let redis = Arc::clone(&self.redis); + let interval_duration = StdDuration::from_nanos(self.interval.whole_nanoseconds() as u64); + let key = self.key.clone(); + + // Spawn a task to periodically update the cache from Redis + task::spawn(async move { + let mut interval = interval(interval_duration); + loop { + match RedisLimiter::fetch_limited(&redis, &key).await { + Ok(set) => { + let set = HashSet::from_iter(set.iter().cloned()); + gauge!("capture_billing_limits_loaded_tokens",).set(set.len() as f64); + + let mut limited_lock = limited.write().await; + *limited_lock = set; + } + Err(e) => { + tracing::error!("Failed to update cache from Redis: {:?}", e); + } + } + + interval.tick().await; + } + }); } #[instrument(skip_all)] async fn fetch_limited( client: &Arc<dyn Client + Send + Sync>, - key_prefix: &str, - resource: &QuotaResource, + key: &String, ) -> anyhow::Result<Vec<String>> { let now = OffsetDateTime::now_utc().unix_timestamp(); - let key =
format!("{key_prefix}{QUOTA_LIMITER_CACHE_KEY}{}", resource.as_str()); client - .zrangebyscore(key, now.to_string(), String::from("+Inf")) + .zrangebyscore(key.to_string(), now.to_string(), String::from("+Inf")) .await } - #[instrument(skip_all, fields(key = key))] - pub async fn is_limited(&self, key: &str, resource: QuotaResource) -> bool { - // hold the read lock to clone it, very briefly. clone is ok because it's very small 🤏 - // rwlock can have many readers, but one writer. the writer will wait in a queue with all - // the readers, so we want to hold read locks for the smallest time possible to avoid - // writers waiting for too long. and vice versa. - let updated = { - let updated = self.updated.read().await; - *updated - }; - - let now = OffsetDateTime::now_utc(); - let since_update = now.sub(updated); - - // If an update is due, fetch the set from redis + cache it until the next update is due. - // Otherwise, return a value from the cache - // - // This update will block readers! Keep it fast. - if since_update > self.interval { - // open the update lock to change the update, and prevent anyone else from doing so - let mut updated = self.updated.write().await; - *updated = OffsetDateTime::now_utc(); - - let span = tracing::debug_span!("updating billing cache from redis"); - let _span = span.enter(); - - // a few requests might end up in here concurrently, but I don't think a few extra will - // be a big problem. If it is, we can rework the concurrency a bit. - // On prod atm we call this around 15 times per second at peak times, and it usually - // completes in <1ms. - - let set = Self::fetch_limited(&self.redis, &self.redis_key_prefix, &resource).await; - - tracing::debug!("fetched set from redis, caching"); - - if let Ok(set) = set { - let set = HashSet::from_iter(set.iter().cloned()); - gauge!( - "capture_billing_limits_loaded_tokens", - "resource" => resource.as_str(), - ) - .set(set.len() as f64); - - let mut limited = self.limited.write().await; - *limited = set; - - tracing::debug!("updated cache from redis"); - - limited.contains(key) - } else { - tracing::error!("failed to fetch from redis in time, failing open"); - // If we fail to fetch the set, something really wrong is happening. To avoid - // dropping events that we don't mean to drop, fail open and accept data. 
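The hunks above replace the read-path cache refresh with a background task, and the pattern is small enough to show in isolation. The following is a minimal sketch, not the capture code itself: `fetch_limited` is a stub standing in for the real `zrangebyscore` query, and it assumes the `tokio` (full features) and `anyhow` crates.

```rust
use std::{collections::HashSet, sync::Arc, time::Duration};
use tokio::{sync::RwLock, task, time::interval};

// Stand-in for the real Redis call (zrangebyscore on the quota-limits key).
async fn fetch_limited() -> anyhow::Result<Vec<String>> {
    Ok(vec!["limited-token".to_string()])
}

struct Limiter {
    limited: Arc<RwLock<HashSet<String>>>,
}

impl Limiter {
    fn new(refresh: Duration) -> Self {
        let limited: Arc<RwLock<HashSet<String>>> = Default::default();
        let shared = limited.clone();
        task::spawn(async move {
            let mut ticker = interval(refresh);
            loop {
                match fetch_limited().await {
                    // Build the new set first; the write lock is held only for the swap.
                    Ok(tokens) => *shared.write().await = tokens.into_iter().collect(),
                    // Fail open: on error, keep serving the last known set.
                    Err(e) => eprintln!("limiter refresh failed: {e:?}"),
                }
                ticker.tick().await;
            }
        });
        Self { limited }
    }

    async fn is_limited(&self, token: &str) -> bool {
        self.limited.read().await.contains(token)
    }
}

#[tokio::main]
async fn main() {
    let limiter = Limiter::new(Duration::from_secs(5));
    // Like the updated tests below, give the first refresh a moment to land.
    tokio::time::sleep(Duration::from_millis(50)).await;
    assert!(limiter.is_limited("limited-token").await);
    assert!(!limiter.is_limited("other-token").await);
}
```

The read path becomes a single short read lock, which is why the new `is_limited` no longer needs a `QuotaResource` argument or any update bookkeeping.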
Better - // than angry customers :) - // - // TODO: Consider backing off our redis checks - false - } - } else { - let l = self.limited.read().await; - - l.contains(key) - } + #[instrument(skip_all, fields(value = value))] + pub async fn is_limited(&self, value: &str) -> bool { + let limited = self.limited.read().await; + limited.contains(value) } } @@ -185,15 +147,12 @@ mod tests { .zrangebyscore_ret("@posthog/quota-limits/events", vec![String::from("banana")]); let client = Arc::new(client); - let limiter = RedisLimiter::new(Duration::microseconds(1), client, None) + let limiter = RedisLimiter::new(Duration::seconds(1), client, None, QuotaResource::Events) .expect("Failed to create billing limiter"); + tokio::time::sleep(std::time::Duration::from_millis(30)).await; - assert!( - !limiter - .is_limited("not_limited", QuotaResource::Events) - .await, - ); - assert!(limiter.is_limited("banana", QuotaResource::Events).await); + assert!(!limiter.is_limited("not_limited").await); + assert!(limiter.is_limited("banana").await); } #[tokio::test] @@ -205,27 +164,27 @@ mod tests { let client = Arc::new(client); // Default lookup without prefix fails - let limiter = RedisLimiter::new(Duration::microseconds(1), client.clone(), None) - .expect("Failed to create billing limiter"); - assert!(!limiter.is_limited("banana", QuotaResource::Events).await); + let limiter = RedisLimiter::new( + Duration::seconds(1), + client.clone(), + None, + QuotaResource::Events, + ) + .expect("Failed to create billing limiter"); + tokio::time::sleep(std::time::Duration::from_millis(30)).await; + assert!(!limiter.is_limited("banana").await); // Limiter using the correct prefix let prefixed_limiter = RedisLimiter::new( Duration::microseconds(1), client, Some("prefix//".to_string()), + QuotaResource::Events, ) .expect("Failed to create billing limiter"); + tokio::time::sleep(std::time::Duration::from_millis(30)).await; - assert!( - !prefixed_limiter - .is_limited("not_limited", QuotaResource::Events) - .await, - ); - assert!( - prefixed_limiter - .is_limited("banana", QuotaResource::Events) - .await - ); + assert!(!prefixed_limiter.is_limited("not_limited").await); + assert!(prefixed_limiter.is_limited("banana").await); } } diff --git a/rust/capture/src/main.rs b/rust/capture/src/main.rs index 14868f059f02b..150cf29835291 100644 --- a/rust/capture/src/main.rs +++ b/rust/capture/src/main.rs @@ -16,12 +16,7 @@ use tracing_subscriber::{EnvFilter, Layer}; use capture::config::Config; use capture::server::serve; -#[cfg(not(target_env = "msvc"))] -use tikv_jemallocator::Jemalloc; - -#[cfg(not(target_env = "msvc"))] -#[global_allocator] -static GLOBAL: Jemalloc = Jemalloc; +common_alloc::used!(); async fn shutdown() { let mut term = signal::unix::signal(signal::unix::SignalKind::terminate()) diff --git a/rust/capture/src/server.rs b/rust/capture/src/server.rs index 93ff3f646c3bc..bb6f7aaf5dd5b 100644 --- a/rust/capture/src/server.rs +++ b/rust/capture/src/server.rs @@ -6,10 +6,11 @@ use health::{ComponentStatus, HealthRegistry}; use time::Duration; use tokio::net::TcpListener; +use crate::config::CaptureMode; use crate::config::Config; use crate::limiters::overflow::OverflowLimiter; -use crate::limiters::redis::RedisLimiter; +use crate::limiters::redis::{QuotaResource, RedisLimiter}; use crate::redis::RedisClient; use crate::router; use crate::sinks::kafka::KafkaSink; @@ -28,6 +29,10 @@ where Duration::seconds(5), redis_client.clone(), config.redis_key_prefix, + match config.capture_mode { + CaptureMode::Events => 
QuotaResource::Events, + CaptureMode::Recordings => QuotaResource::Recordings, + }, ) .expect("failed to create billing limiter"); diff --git a/rust/capture/src/v0_endpoint.rs index 03b550cd9cdaf..e01b9cbf5bc22 100644 --- a/rust/capture/src/v0_endpoint.rs +++ b/rust/capture/src/v0_endpoint.rs @@ -13,7 +13,6 @@ use serde_json::json; use serde_json::Value; use tracing::instrument; -use crate::limiters::redis::QuotaResource; use crate::prometheus::report_dropped_events; use crate::v0_request::{Compression, ProcessingContext, RawRequest}; use crate::{ @@ -29,7 +28,6 @@ use crate::{ /// /// Because it must accommodate several shapes, it is inefficient in places. A v1 /// endpoint should be created, that only accepts the BatchedRequest payload shape. -#[allow(clippy::too_many_arguments)] async fn handle_common( state: &State<router::State>, InsecureClientIp(ip): &InsecureClientIp, @@ -37,7 +35,6 @@ async fn handle_common( headers: &HeaderMap, method: &Method, path: &MatchedPath, - quota_resource: QuotaResource, body: Bytes, ) -> Result<(ProcessingContext, Vec<RawEvent>), CaptureError> { let user_agent = headers @@ -119,7 +116,7 @@ async fn handle_common( let billing_limited = state .billing_limiter - .is_limited(context.token.as_str(), quota_resource) + .is_limited(context.token.as_str()) .await; if billing_limited { @@ -157,18 +154,7 @@ pub async fn event( path: MatchedPath, body: Bytes, ) -> Result<Json<CaptureResponse>, CaptureError> { - match handle_common( - &state, - &ip, - &meta, - &headers, - &method, - &path, - QuotaResource::Events, - body, - ) - .await - { + match handle_common(&state, &ip, &meta, &headers, &method, &path, body).await { Err(CaptureError::BillingLimit) => { // for v0 we want to just return ok 🙃 // this is because the clients are pretty dumb and will just retry over and over and @@ -227,18 +213,7 @@ pub async fn recording( path: MatchedPath, body: Bytes, ) -> Result<Json<CaptureResponse>, CaptureError> { - match handle_common( - &state, - &ip, - &meta, - &headers, - &method, - &path, - QuotaResource::Recordings, - body, - ) - .await - { + match handle_common(&state, &ip, &meta, &headers, &method, &path, body).await { Err(CaptureError::BillingLimit) => Ok(Json(CaptureResponse { status: CaptureResponseCode::Ok, quota_limited: Some(vec!["recordings".to_string()]), diff --git a/rust/capture/tests/django_compat.rs index d08be11c7506c..a5f81aa589c51 100644 --- a/rust/capture/tests/django_compat.rs +++ b/rust/capture/tests/django_compat.rs @@ -6,6 +6,7 @@ use base64::engine::general_purpose; use base64::Engine; use capture::api::{CaptureError, CaptureResponse, CaptureResponseCode, DataType, ProcessedEvent}; use capture::config::CaptureMode; +use capture::limiters::redis::QuotaResource; use capture::limiters::redis::RedisLimiter; use capture::redis::MockRedisClient; use capture::router::router; @@ -101,8 +102,13 @@ async fn it_matches_django_capture_behaviour() -> anyhow::Result<()> { let timesource = FixedTime { time: case.now }; let redis = Arc::new(MockRedisClient::new()); - let billing_limiter = RedisLimiter::new(Duration::weeks(1), redis.clone(), None) - .expect("failed to create billing limiter"); + let billing_limiter = RedisLimiter::new( + Duration::weeks(1), + redis.clone(), + None, + QuotaResource::Events, + ) + .expect("failed to create billing limiter"); let app = router( timesource, diff --git a/rust/common/alloc/Cargo.toml b/rust/common/alloc/Cargo.toml new file mode 100644 index 0000000000000..c000c381d3c1d --- /dev/null +++ b/rust/common/alloc/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "common-alloc" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +[target.'cfg(not(target_env = "msvc"))'.dependencies] +tikv-jemallocator = "0.6" diff --git a/rust/common/alloc/README.md b/rust/common/alloc/README.md new file mode 100644 index 0000000000000..f35e8a6437ba7 --- /dev/null +++ b/rust/common/alloc/README.md @@ -0,0 +1,12 @@ +# What is this? + +We use jemalloc everywhere we can, for any binary that we expect to run in a long-lived process. The reason for this is that our workloads are: +- multi-threaded +- extremely prone to memory fragmentation (due to our heavy use of `serde_json`, or json generally) + +jemalloc helps reduce memory fragmentation hugely, to the point of solving production OOMs that would have made using capture-rs for replay a non-starter with the default system allocator. + +At time of writing (2024-09-04), rust workspaces don't have good support for specifying dependencies on a per-target basis, so this crate does the work of pulling in jemalloc only when compiling for supported targets, and then exposes a simple macro to use jemalloc as the global allocator. Anyone writing a binary crate should put this macro at the top of their `main.rs`. Libraries should not make use of this crate. + +## Future work +Functions could be added to this crate that, when jemalloc is in use, report a set of metrics about the allocator, as well as other functionality (health/liveness, a way to specify hooks to execute when memory usage exceeds a certain threshold, etc). Right now, it's pretty barebones. \ No newline at end of file diff --git a/rust/common/alloc/src/lib.rs b/rust/common/alloc/src/lib.rs new file mode 100644 index 0000000000000..af560a96b3dc0 --- /dev/null +++ b/rust/common/alloc/src/lib.rs @@ -0,0 +1,12 @@ +#[cfg(target_env = "msvc")] +pub use std::alloc::System as DefaultAllocator; +#[cfg(not(target_env = "msvc"))] +pub use tikv_jemallocator::Jemalloc as DefaultAllocator; + +#[macro_export] +macro_rules! used { + () => { + #[global_allocator] + static GLOBAL: $crate::DefaultAllocator = $crate::DefaultAllocator; + }; +} diff --git a/rust/common/kafka/src/kafka_producer.rs b/rust/common/kafka/src/kafka_producer.rs index 4f30850125415..965406a1ce08c 100644 --- a/rust/common/kafka/src/kafka_producer.rs +++ b/rust/common/kafka/src/kafka_producer.rs @@ -3,12 +3,12 @@ use crate::config::KafkaConfig; use futures::future::join_all; use health::HealthHandle; use rdkafka::error::KafkaError; -use rdkafka::producer::{FutureProducer, FutureRecord}; +use rdkafka::producer::{FutureProducer, FutureRecord, Producer}; use rdkafka::ClientConfig; use serde::Serialize; use serde_json::error::Error as SerdeError; use thiserror::Error; -use tracing::debug; +use tracing::{debug, error, info}; pub struct KafkaContext { liveness: HealthHandle, @@ -55,7 +55,22 @@ pub async fn create_kafka_producer( let api: FutureProducer<KafkaContext> = client_config.create_with_context(KafkaContext { liveness })?; - // TODO: ping the kafka brokers to confirm configuration is OK (copy capture) + // "Ping" the Kafka brokers by requesting metadata + match api + .client() + .fetch_metadata(None, std::time::Duration::from_secs(15)) + { + Ok(metadata) => { + info!( + "Successfully connected to Kafka brokers.
Found {} topics.", + metadata.topics().len() + ); + } + Err(error) => { + error!("Failed to fetch metadata from Kafka brokers: {:?}", error); + return Err(error); + } + } Ok(api) } diff --git a/rust/cyclotron-fetch/Cargo.toml b/rust/cyclotron-fetch/Cargo.toml index 69f6f4ac2adf1..8de85020ea106 100644 --- a/rust/cyclotron-fetch/Cargo.toml +++ b/rust/cyclotron-fetch/Cargo.toml @@ -19,6 +19,7 @@ cyclotron-core = { path = "../cyclotron-core" } common-metrics = { path = "../common/metrics" } common-dns = { path = "../common/dns" } common-kafka = { path = "../common/kafka" } +common-alloc = { path = "../common/alloc" } health = { path = "../common/health" } reqwest = { workspace = true } serde = { workspace = true } diff --git a/rust/cyclotron-fetch/src/main.rs b/rust/cyclotron-fetch/src/main.rs index 2013f1b6c7218..ebefa9f01d787 100644 --- a/rust/cyclotron-fetch/src/main.rs +++ b/rust/cyclotron-fetch/src/main.rs @@ -10,6 +10,8 @@ use health::HealthRegistry; use std::{future::ready, sync::Arc}; use tracing::{error, info}; +common_alloc::used!(); + async fn listen(app: Router, bind: String) -> Result<(), std::io::Error> { let listener = tokio::net::TcpListener::bind(bind).await?; diff --git a/rust/cyclotron-janitor/Cargo.toml b/rust/cyclotron-janitor/Cargo.toml index d6eb553d3e72f..15a0ae4e412f1 100644 --- a/rust/cyclotron-janitor/Cargo.toml +++ b/rust/cyclotron-janitor/Cargo.toml @@ -19,6 +19,7 @@ cyclotron-core = { path = "../cyclotron-core" } common-metrics = { path = "../common/metrics" } common-kafka = { path = "../common/kafka" } health = { path = "../common/health" } +common-alloc = { path = "../common/alloc" } time = { workspace = true } rdkafka = { workspace = true } diff --git a/rust/cyclotron-janitor/src/main.rs b/rust/cyclotron-janitor/src/main.rs index fa0f682601e61..a4a9274e08f3c 100644 --- a/rust/cyclotron-janitor/src/main.rs +++ b/rust/cyclotron-janitor/src/main.rs @@ -7,8 +7,7 @@ use health::{HealthHandle, HealthRegistry}; use std::{future::ready, time::Duration}; use tracing::{error, info}; -/// Most of this stuff is stolen pretty shamelessly from the rustyhook janitor. It'll diverge more -/// once we introduce the management command stuff, but for now it's a good starting point. 
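Several `main.rs` files in this diff gain a bare `common_alloc::used!();`, so it is worth spelling out what that one line does. Per the `lib.rs` shown above, the macro only declares the global allocator; on a non-MSVC target the expansion is equivalent to this sketch (the `String` in `main` is just there to show that ordinary heap allocations now route through jemalloc):

```rust
// What `common_alloc::used!();` expands to. DefaultAllocator is re-exported as
// tikv_jemallocator::Jemalloc on non-MSVC targets and std::alloc::System on MSVC.
#[global_allocator]
static GLOBAL: common_alloc::DefaultAllocator = common_alloc::DefaultAllocator;

fn main() {
    // Every heap allocation in this binary (Box, Vec, String, serde_json::Value, ...)
    // is now served by the allocator declared above.
    let greeting = String::from("allocated via jemalloc");
    println!("{greeting}");
}
```

Libraries never invoke the macro; only binary crates should, which is why each service adds it at the top of its own `main.rs` below.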
+common_alloc::used!(); async fn cleanup_loop(janitor: Janitor, livenes: HealthHandle, interval_secs: u64) -> Result<()> { let mut interval = tokio::time::interval(Duration::from_secs(interval_secs)); diff --git a/rust/cyclotron-janitor/tests/janitor.rs b/rust/cyclotron-janitor/tests/janitor.rs index 7dceae4969c19..90afcfbdec45e 100644 --- a/rust/cyclotron-janitor/tests/janitor.rs +++ b/rust/cyclotron-janitor/tests/janitor.rs @@ -1,14 +1,12 @@ -use chrono::{DateTime, Duration, Timelike, Utc}; +use chrono::{Duration, Timelike, Utc}; use common_kafka::kafka_messages::app_metrics2::{ AppMetric2, Kind as AppMetric2Kind, Source as AppMetric2Source, }; use cyclotron_core::{JobInit, JobState, QueueManager, Worker}; use cyclotron_janitor::{config::JanitorSettings, janitor::Janitor}; use rdkafka::consumer::{Consumer, StreamConsumer}; -use rdkafka::types::{RDKafkaApiKey, RDKafkaRespErr}; use rdkafka::{ClientConfig, Message}; use sqlx::PgPool; -use std::str::FromStr; use uuid::Uuid; use common_kafka::{test::create_mock_kafka, APP_METRICS2_TOPIC}; @@ -58,7 +56,7 @@ async fn janitor_test(db: PgPool) { queue_name: queue_name.clone(), priority: 0, scheduled: now, - function_id: Some(uuid.clone()), + function_id: Some(uuid), vm_state: None, parameters: None, blob: None, diff --git a/rust/cyclotron-node/src/helpers.ts b/rust/cyclotron-node/src/helpers.ts new file mode 100644 index 0000000000000..ba1ace2a37161 --- /dev/null +++ b/rust/cyclotron-node/src/helpers.ts @@ -0,0 +1,30 @@ +import { CyclotronInternalPoolConfig, CyclotronPoolConfig } from './types' + +export function convertToInternalPoolConfig(poolConfig: CyclotronPoolConfig): CyclotronInternalPoolConfig { + return { + db_url: poolConfig.dbUrl, + max_connections: poolConfig.maxConnections, + min_connections: poolConfig.minConnections, + acquire_timeout_seconds: poolConfig.acquireTimeoutSeconds, + max_lifetime_seconds: poolConfig.maxLifetimeSeconds, + idle_timeout_seconds: poolConfig.idleTimeoutSeconds, + } +} + +export function serializeObject(name: string, obj: Record<string, any> | null): string | null { + if (obj === null) { + return null + } else if (typeof obj === 'object' && obj !== null) { + return JSON.stringify(obj) + } + throw new Error(`${name} must be either an object or null`) +} + +export function deserializeObject(name: string, str: any): Record<string, any> | null { + if (str === null) { + return null + } else if (typeof str === 'string') { + return JSON.parse(str) + } + throw new Error(`${name} must be either a string or null`) +} diff --git a/rust/cyclotron-node/src/index.ts b/rust/cyclotron-node/src/index.ts index fb8dd659d80c3..e905c5f6cd4ad 100644 --- a/rust/cyclotron-node/src/index.ts +++ b/rust/cyclotron-node/src/index.ts @@ -1,222 +1,3 @@ -// eslint-disable-next-line @typescript-eslint/no-var-requires -const cyclotron = require('../index.node') - -export interface PoolConfig { - dbUrl: string - maxConnections?: number - minConnections?: number - acquireTimeoutSeconds?: number - maxLifetimeSeconds?: number - idleTimeoutSeconds?: number -} - -// Type as expected by Cyclotron. -interface InternalPoolConfig { - db_url: string - max_connections?: number - min_connections?: number - acquire_timeout_seconds?: number - max_lifetime_seconds?: number - idle_timeout_seconds?: number -} - -export interface ManagerConfig { - shards: PoolConfig[] -} - -// Type as expected by Cyclotron. -interface InternalManagerConfig { - shards: InternalPoolConfig[] -} - -export interface JobInit { - teamId: number - functionId: string - queueName: string - priority?: number - scheduled?: Date - vmState?: string - parameters?: string - blob?: Uint8Array - metadata?: string -} - -// Type as expected by Cyclotron. -interface InternalJobInit { - team_id: number - function_id: string - queue_name: string - priority?: number - scheduled?: Date - vm_state?: string - parameters?: string - metadata?: string -} - -export type JobState = 'available' | 'running' | 'completed' | 'failed' | 'paused' - -export interface Job { - id: string - teamId: number - functionId: string | null - created: Date - lockId: string | null - lastHeartbeat: Date | null - janitorTouchCount: number - transitionCount: number - lastTransition: Date - queueName: string - state: JobState - priority: number - scheduled: Date - vmState: string | null - metadata: string | null - parameters: string | null - blob: Uint8Array | null -} - -export async function initWorker(poolConfig: PoolConfig): Promise<void> { - const initWorkerInternal: InternalPoolConfig = { - db_url: poolConfig.dbUrl, - max_connections: poolConfig.maxConnections, - min_connections: poolConfig.minConnections, - acquire_timeout_seconds: poolConfig.acquireTimeoutSeconds, - max_lifetime_seconds: poolConfig.maxLifetimeSeconds, - idle_timeout_seconds: poolConfig.idleTimeoutSeconds, - } - return await cyclotron.initWorker(JSON.stringify(initWorkerInternal)) -} - -export async function initManager(managerConfig: ManagerConfig): Promise<void> { - const managerConfigInternal: InternalManagerConfig = { - shards: managerConfig.shards.map((shard) => ({ - db_url: shard.dbUrl, - max_connections: shard.maxConnections, - min_connections: shard.minConnections, - acquire_timeout_seconds: shard.acquireTimeoutSeconds, - max_lifetime_seconds: shard.maxLifetimeSeconds, - idle_timeout_seconds: shard.idleTimeoutSeconds, - })), - } - return await cyclotron.initManager(JSON.stringify(managerConfigInternal)) -} - -export async function maybeInitWorker(poolConfig: PoolConfig): Promise<void> { - const initWorkerInternal: InternalPoolConfig = { - db_url: poolConfig.dbUrl, - max_connections: poolConfig.maxConnections, - min_connections: poolConfig.minConnections, - acquire_timeout_seconds: poolConfig.acquireTimeoutSeconds, - max_lifetime_seconds: poolConfig.maxLifetimeSeconds, - idle_timeout_seconds: poolConfig.idleTimeoutSeconds, - } - return await cyclotron.maybeInitWorker(JSON.stringify(initWorkerInternal)) -} - -export async function maybeInitManager(managerConfig: ManagerConfig): Promise<void> { - const managerConfigInternal: InternalManagerConfig = { - shards: managerConfig.shards.map((shard) => ({ - db_url: shard.dbUrl, - max_connections: shard.maxConnections, - min_connections: shard.minConnections, - acquire_timeout_seconds: shard.acquireTimeoutSeconds, - max_lifetime_seconds: shard.maxLifetimeSeconds, - idle_timeout_seconds: shard.idleTimeoutSeconds, - })), - } - return await cyclotron.maybeInitManager(JSON.stringify(managerConfigInternal)) -} - -export async function createJob(job: JobInit): Promise<void> { - job.priority ??= 1 - job.scheduled ??= new Date() - - const jobInitInternal: InternalJobInit = { - team_id: job.teamId, - function_id: job.functionId, - queue_name: job.queueName, - priority: job.priority, - scheduled: job.scheduled, - vm_state: job.vmState, - parameters: job.parameters, - metadata: job.metadata, - } - - const json = JSON.stringify(jobInitInternal) - return await cyclotron.createJob(json, job.blob ? job.blob.buffer : undefined) -} - -export async function dequeueJobs(queueName: string, limit: number): Promise<Job[]> { - return await cyclotron.dequeueJobs(queueName, limit) -} - -export async function dequeueJobsWithVmState(queueName: string, limit: number): Promise<Job[]> { - return await cyclotron.dequeueJobsWithVmState(queueName, limit) -} - -export async function flushJob(jobId: string): Promise<void> { - return await cyclotron.flushJob(jobId) -} - -export function setState(jobId: string, jobState: JobState): Promise<void> { - return cyclotron.setState(jobId, jobState) -} - -export function setQueue(jobId: string, queueName: string): Promise<void> { - return cyclotron.setQueue(jobId, queueName) -} - -export function setPriority(jobId: string, priority: number): Promise<void> { - return cyclotron.setPriority(jobId, priority) -} - -export function setScheduledAt(jobId: string, scheduledAt: Date): Promise<void> { - return cyclotron.setScheduledAt(jobId, scheduledAt.toISOString()) -} - -export function serializeObject(name: string, obj: Record<string, any> | null): string | null { - if (obj === null) { - return null - } else if (typeof obj === 'object' && obj !== null) { - return JSON.stringify(obj) - } - throw new Error(`${name} must be either an object or null`) -} - -export function setVmState(jobId: string, vmState: Record<string, any> | null): Promise<void> { - const serialized = serializeObject('vmState', vmState) - return cyclotron.setVmState(jobId, serialized) -} - -export function setMetadata(jobId: string, metadata: Record<string, any> | null): Promise<void> { - const serialized = serializeObject('metadata', metadata) - return cyclotron.setMetadata(jobId, serialized) -} - -export function setParameters(jobId: string, parameters: Record<string, any> | null): Promise<void> { - const serialized = serializeObject('parameters', parameters) - return cyclotron.setParameters(jobId, serialized) -} - -export function setBlob(jobId: string, blob: Uint8Array | null): Promise<void> { - return cyclotron.setBlob(jobId, blob) -} - -export default { - initWorker, - initManager, - maybeInitWorker, - maybeInitManager, - createJob, - dequeueJobs, - dequeueJobsWithVmState, - flushJob, - setState, - setQueue, - setPriority, - setScheduledAt, - setVmState, - setMetadata, - setParameters, - setBlob, -} +export * from './manager' +export * from './types' +export * from './worker' diff --git a/rust/cyclotron-node/src/manager.ts b/rust/cyclotron-node/src/manager.ts new file mode 100644 index 0000000000000..bba6488828ba2 --- /dev/null +++ b/rust/cyclotron-node/src/manager.ts @@ -0,0 +1,39 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const cyclotron = require('../index.node') + +import { convertToInternalPoolConfig, serializeObject } from './helpers' +import { CyclotronJobInit, CyclotronPoolConfig } from './types' + +export class CyclotronManager { + constructor(private config: { shards: CyclotronPoolConfig[] }) { + this.config = config + } + + async connect(): Promise<void> { + return await cyclotron.maybeInitManager( + JSON.stringify({ + shards: this.config.shards.map((shard) => convertToInternalPoolConfig(shard)), + }) + ) + } + + async createJob(job: CyclotronJobInit): Promise<void> { + job.priority ??= 1 + job.scheduled ??= new Date() + + // TODO: Why is this type of job snake case whereas the dequeue return type is camel case? + const jobInitInternal = { + team_id: job.teamId, + function_id: job.functionId, + queue_name: job.queueName, + priority: job.priority, + scheduled: job.scheduled, + vm_state: job.vmState ? serializeObject('vmState', job.vmState) : null, + parameters: job.parameters ? serializeObject('parameters', job.parameters) : null, + metadata: job.metadata ? serializeObject('metadata', job.metadata) : null, + } + + const json = JSON.stringify(jobInitInternal) + return await cyclotron.createJob(json, job.blob ? job.blob.buffer : undefined) + } +} diff --git a/rust/cyclotron-node/src/types.ts b/rust/cyclotron-node/src/types.ts new file mode 100644 index 0000000000000..88c8a26099083 --- /dev/null +++ b/rust/cyclotron-node/src/types.ts @@ -0,0 +1,48 @@ +export type CyclotronPoolConfig = { + dbUrl: string + maxConnections?: number + minConnections?: number + acquireTimeoutSeconds?: number + maxLifetimeSeconds?: number + idleTimeoutSeconds?: number +} + +// Type as expected by Cyclotron. +export type CyclotronInternalPoolConfig = { + db_url: string + max_connections?: number + min_connections?: number + acquire_timeout_seconds?: number + max_lifetime_seconds?: number + idle_timeout_seconds?: number +} + +export type CyclotronJobState = 'available' | 'running' | 'completed' | 'failed' | 'paused' + +export type CyclotronJob = { + id: string + teamId: number + functionId: string | null + created: Date + lockId: string | null + lastHeartbeat: Date | null + janitorTouchCount: number + transitionCount: number + lastTransition: Date + queueName: string + state: CyclotronJobState + priority: number + scheduled: Date + vmState: object | null + metadata: object | null + parameters: object | null + blob: Uint8Array | null +} + +export type CyclotronJobInit = Pick<CyclotronJob, 'teamId' | 'functionId' | 'queueName' | 'priority'> & + Pick<Partial<CyclotronJob>, 'scheduled' | 'vmState' | 'parameters' | 'metadata' | 'blob'> + +export type CyclotronJobUpdate = Pick< + Partial<CyclotronJob>, + 'queueName' | 'priority' | 'vmState' | 'parameters' | 'metadata' | 'blob' +> diff --git a/rust/cyclotron-node/src/worker.ts b/rust/cyclotron-node/src/worker.ts new file mode 100644 index 0000000000000..7b3411863af7d --- /dev/null +++ b/rust/cyclotron-node/src/worker.ts @@ -0,0 +1,120 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const cyclotron = require('../index.node') +import { convertToInternalPoolConfig, deserializeObject, serializeObject } from './helpers' +import { CyclotronJob, CyclotronJobState, CyclotronJobUpdate, CyclotronPoolConfig } from './types' + +const parseJob = (job: CyclotronJob): CyclotronJob => { + return { + ...job, + vmState: deserializeObject('vmState', job.vmState), + metadata: deserializeObject('metadata', job.metadata), + parameters: deserializeObject('parameters', job.parameters), + } +} + +export type CyclotronWorkerConfig = { + pool: CyclotronPoolConfig + /** The queue to be consumed from */ + queueName: string + /** Max number of jobs to consume in a batch. Default: 100 */ + batchMaxSize?: number + /** Whether the vmState will be included or not */ + includeVmState?: boolean + /** Amount of delay between dequeue polls. Default: 50ms */ + pollDelayMs?: number + /** Heartbeat timeout. After this time without response from the worker loop the worker will be considered unhealthy. Default 30000 */ + heartbeatTimeoutMs?: number +} + +export class CyclotronWorker { + isConsuming: boolean = false + lastHeartbeat: Date = new Date() + + private consumerLoopPromise: Promise<void> | null = null + + constructor(private config: CyclotronWorkerConfig) { + this.config = config + } + + public isHealthy(): boolean { + return ( + this.isConsuming && + new Date().getTime() - this.lastHeartbeat.getTime() < (this.config.heartbeatTimeoutMs ?? 30000) + ) + } + + async connect(processBatch: (jobs: CyclotronJob[]) => Promise<void>): Promise<void> { + if (this.isConsuming) { + throw new Error('Already consuming') + } + + await cyclotron.maybeInitWorker(JSON.stringify(convertToInternalPoolConfig(this.config.pool))) + + this.isConsuming = true + this.consumerLoopPromise = this.startConsumerLoop(processBatch).finally(() => { + this.isConsuming = false + this.consumerLoopPromise = null + }) + } + + private async startConsumerLoop(processBatch: (jobs: CyclotronJob[]) => Promise<void>): Promise<void> { + try { + this.isConsuming = true + + const batchMaxSize = this.config.batchMaxSize ?? 100 + const pollDelayMs = this.config.pollDelayMs ?? 50 + + while (this.isConsuming) { + this.lastHeartbeat = new Date() + + const jobs = ( + this.config.includeVmState + ? await cyclotron.dequeueJobsWithVmState(this.config.queueName, batchMaxSize) + : await cyclotron.dequeueJobs(this.config.queueName, batchMaxSize) + ).map(parseJob) + + if (!jobs.length) { + // Wait a bit before polling again + await new Promise((resolve) => setTimeout(resolve, pollDelayMs)) + continue + } + + await processBatch(jobs) + } + } catch (e) { + // We only log here so as not to crash the parent process + console.error('[Cyclotron] Error in worker loop', e) + } + } + + async disconnect(): Promise<void> { + this.isConsuming = false + await (this.consumerLoopPromise ?? Promise.resolve()) + } + + async flushJob(jobId: string): Promise<void> { + return await cyclotron.flushJob(jobId) + } + + updateJob(id: CyclotronJob['id'], state: CyclotronJobState, updates?: CyclotronJobUpdate): void { + cyclotron.setState(id, state) + if (updates?.queueName) { + cyclotron.setQueue(id, updates.queueName) + } + if (updates?.priority) { + cyclotron.setPriority(id, updates.priority) + } + if (updates?.parameters) { + cyclotron.setParameters(id, serializeObject('parameters', updates.parameters)) + } + if (updates?.metadata) { + cyclotron.setMetadata(id, serializeObject('metadata', updates.metadata)) + } + if (updates?.vmState) { + cyclotron.setVmState(id, serializeObject('vmState', updates.vmState)) + } + if (updates?.blob) { + cyclotron.setBlob(id, updates.blob) + } + } +} diff --git a/rust/feature-flags/Cargo.toml b/rust/feature-flags/Cargo.toml index b43d09cc93d2f..3d898dfdbfa72 100644 --- a/rust/feature-flags/Cargo.toml +++ b/rust/feature-flags/Cargo.toml @@ -31,6 +31,7 @@ regex = "1.10.4" maxminddb = "0.17" sqlx = { workspace = true } uuid = { workspace = true } +common-alloc = { path = "../common/alloc" } [lints] workspace = true diff --git a/rust/feature-flags/src/main.rs b/rust/feature-flags/src/main.rs index 980db6973893f..46cc1be270b27 100644 --- a/rust/feature-flags/src/main.rs +++ b/rust/feature-flags/src/main.rs @@ -7,6 +7,8 @@ use tracing_subscriber::{EnvFilter, Layer}; use feature_flags::config::Config; use feature_flags::server::serve; +common_alloc::used!(); + async fn shutdown() { let mut term = signal::unix::signal(signal::unix::SignalKind::terminate()) .expect("failed to register SIGTERM handler"); diff --git a/rust/hook-api/Cargo.toml b/rust/hook-api/Cargo.toml index 7887e8e49a8e2..87057fa8c74fd 100644 --- a/rust/hook-api/Cargo.toml +++ b/rust/hook-api/Cargo.toml @@ -23,3 +23,4 @@ tracing = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true } common-metrics = { path = "../common/metrics" } +common-alloc = { path = "../common/alloc" } diff --git a/rust/hook-api/src/main.rs b/rust/hook-api/src/main.rs index 1f84abb4e4665..0491d49eea3be 100644 --- a/rust/hook-api/src/main.rs +++
b/rust/hook-api/src/main.rs @@ -9,6 +9,8 @@ use hook_common::pgqueue::PgQueue; mod config; mod handlers; +common_alloc::used!(); + async fn listen(app: Router, bind: String) -> Result<()> { let listener = tokio::net::TcpListener::bind(bind).await?; diff --git a/rust/hook-janitor/Cargo.toml b/rust/hook-janitor/Cargo.toml index dba9bef7e7046..70d6e263296e6 100644 --- a/rust/hook-janitor/Cargo.toml +++ b/rust/hook-janitor/Cargo.toml @@ -26,3 +26,4 @@ tracing = { workspace = true } tracing-subscriber = { workspace = true } common-metrics = { path = "../common/metrics" } common-kafka = { path = "../common/kafka" } +common-alloc = { path = "../common/alloc" } diff --git a/rust/hook-janitor/src/main.rs b/rust/hook-janitor/src/main.rs index 6ca27fa6e6d6f..de8910bcff97b 100644 --- a/rust/hook-janitor/src/main.rs +++ b/rust/hook-janitor/src/main.rs @@ -17,6 +17,8 @@ mod config; mod handlers; mod webhooks; +common_alloc::used!(); + async fn listen(app: Router, bind: String) -> Result<()> { let listener = tokio::net::TcpListener::bind(bind).await?; diff --git a/rust/hook-worker/Cargo.toml b/rust/hook-worker/Cargo.toml index fdc6f150dfec9..4670116572a04 100644 --- a/rust/hook-worker/Cargo.toml +++ b/rust/hook-worker/Cargo.toml @@ -28,6 +28,7 @@ url = { version = "2.2" } common-metrics = { path = "../common/metrics" } common-dns = { path = "../common/dns" } common-kafka = { path = "../common/kafka" } +common-alloc = { path = "../common/alloc" } [dev-dependencies] httpmock = { workspace = true } diff --git a/rust/hook-worker/src/main.rs b/rust/hook-worker/src/main.rs index 0aeae27e0a3a0..7aa0845247a92 100644 --- a/rust/hook-worker/src/main.rs +++ b/rust/hook-worker/src/main.rs @@ -13,6 +13,8 @@ use hook_worker::config::Config; use hook_worker::error::WorkerError; use hook_worker::worker::WebhookWorker; +common_alloc::used!(); + #[tokio::main] async fn main() -> Result<(), WorkerError> { tracing_subscriber::fmt::init(); diff --git a/rust/property-defs-rs/Cargo.toml b/rust/property-defs-rs/Cargo.toml index 6deb3bc0c22f7..f0ec58d5a6fac 100644 --- a/rust/property-defs-rs/Cargo.toml +++ b/rust/property-defs-rs/Cargo.toml @@ -21,6 +21,7 @@ metrics = { workspace = true } chrono = { workspace = true } quick_cache = { workspace = true } common-metrics = { path = "../common/metrics" } +common-alloc = { path = "../common/alloc" } ahash = { workspace = true } uuid = { workspace = true } diff --git a/rust/property-defs-rs/src/config.rs b/rust/property-defs-rs/src/config.rs index 7afe4589944f9..41494a2eab6ba 100644 --- a/rust/property-defs-rs/src/config.rs +++ b/rust/property-defs-rs/src/config.rs @@ -1,3 +1,5 @@ +use std::{num::ParseIntError, str::FromStr}; + use envconfig::Envconfig; use rdkafka::ClientConfig; @@ -92,6 +94,16 @@ pub struct Config { #[envconfig(from = "BIND_PORT", default = "3301")] pub port: u16, + + // The set of teams to opt-in or opt-out of property definitions processing (depending on the setting below) + #[envconfig(default = "")] + pub filtered_teams: TeamList, + + // Whether the team list above is used to filter teams OUT of processing (opt-out) or IN to processing (opt-in). + // Defaults to opt-in for now, skipping all updates for teams not in the list. TODO - change this to opt-out + // once rollout is complete. 
+ #[envconfig(default = "opt_in")] + pub filter_mode: TeamFilterMode, } #[derive(Envconfig, Clone)] @@ -125,3 +137,54 @@ impl From<&KafkaConfig> for ClientConfig { client_config } } + +#[derive(Clone)] +pub struct TeamList { + pub teams: Vec<i32>, +} + +impl FromStr for TeamList { + type Err = ParseIntError; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + let mut teams = Vec::new(); + for team in s.trim().split(',') { + if team.is_empty() { + continue; + } + teams.push(team.parse()?); + } + Ok(TeamList { teams }) + } +} + +#[derive(Clone)] +pub enum TeamFilterMode { + OptIn, + OptOut, +} + +impl FromStr for TeamFilterMode { + type Err = String; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s.to_lowercase().trim() { + "opt_in" => Ok(TeamFilterMode::OptIn), + "opt_out" => Ok(TeamFilterMode::OptOut), + "opt-in" => Ok(TeamFilterMode::OptIn), + "opt-out" => Ok(TeamFilterMode::OptOut), + "optin" => Ok(TeamFilterMode::OptIn), + "optout" => Ok(TeamFilterMode::OptOut), + _ => Err(format!("Invalid team filter mode: {}", s)), + } + } +} + +impl TeamFilterMode { + pub fn should_process(&self, list: &[i32], team_id: i32) -> bool { + match self { + TeamFilterMode::OptIn => list.contains(&team_id), + TeamFilterMode::OptOut => !list.contains(&team_id), + } + } +} diff --git a/rust/property-defs-rs/src/main.rs index 2fa7b94614081..846b307ee6876 100644 --- a/rust/property-defs-rs/src/main.rs +++ b/rust/property-defs-rs/src/main.rs @@ -6,12 +6,13 @@ use envconfig::Envconfig; use futures::future::ready; use property_defs_rs::{ app_context::AppContext, - config::Config, + config::{Config, TeamFilterMode, TeamList}, message_to_event, metrics_consts::{ BATCH_ACQUIRE_TIME, CACHE_CONSUMED, COMPACTED_UPDATES, EVENTS_RECEIVED, FORCED_SMALL_BATCH, - PERMIT_WAIT_TIME, RECV_DEQUEUED, TRANSACTION_LIMIT_SATURATION, UPDATES_FILTERED_BY_CACHE, - UPDATES_PER_EVENT, UPDATES_SEEN, UPDATE_ISSUE_TIME, WORKER_BLOCKED, + PERMIT_WAIT_TIME, RECV_DEQUEUED, SKIPPED_DUE_TO_TEAM_FILTER, TRANSACTION_LIMIT_SATURATION, + UPDATES_FILTERED_BY_CACHE, UPDATES_PER_EVENT, UPDATES_SEEN, UPDATE_ISSUE_TIME, + WORKER_BLOCKED, }, types::Update, }; @@ -31,6 +32,8 @@ use tokio::{ use tracing::{info, warn}; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer}; +common_alloc::used!(); + fn setup_tracing() { let log_layer: tracing_subscriber::filter::Filtered< tracing_subscriber::fmt::Layer<tracing_subscriber::Registry>, @@ -68,6 +71,8 @@ async fn spawn_producer_loop( shared_cache: Arc<Cache<Update, ()>>, skip_threshold: usize, compaction_batch_size: usize, + team_filter_mode: TeamFilterMode, + team_list: TeamList, ) { let mut batch = AHashSet::with_capacity(compaction_batch_size); let mut last_send = tokio::time::Instant::now(); @@ -81,6 +86,11 @@ async fn spawn_producer_loop( continue; }; + if !team_filter_mode.should_process(&team_list.teams, event.team_id) { + metrics::counter!(SKIPPED_DUE_TO_TEAM_FILTER).increment(1); + continue; + } + let updates = event.into_updates(skip_threshold); metrics::counter!(EVENTS_RECEIVED).increment(1); @@ -93,29 +103,34 @@ async fn spawn_producer_loop( continue; } batch.insert(update); + } - if batch.len() >= compaction_batch_size || last_send.elapsed() > Duration::from_secs(10) - { - last_send = tokio::time::Instant::now(); - for update in batch.drain() { - if shared_cache.get(&update).is_some() { - metrics::counter!(UPDATES_FILTERED_BY_CACHE).increment(1); - continue; + // We do the full batch insert before checking the time/batch size, because if we did this + // inside the `for update in updates` loop, under extremely low-load situations, we'd push a + // single update into the channel, then push the rest into the batch, and loop around to + // wait on the next event, which might come an arbitrary amount of time later. This bit me + // in testing, and while it's not a correctness problem and under normal load we'd never + // see it, we may as well just do the full batch insert first. + if batch.len() >= compaction_batch_size || last_send.elapsed() > Duration::from_secs(10) { + last_send = tokio::time::Instant::now(); + for update in batch.drain() { + if shared_cache.get(&update).is_some() { + metrics::counter!(UPDATES_FILTERED_BY_CACHE).increment(1); + continue; + } + shared_cache.insert(update.clone(), ()); + match channel.try_send(update) { + Ok(_) => {} + Err(TrySendError::Full(update)) => { + warn!("Worker blocked"); + metrics::counter!(WORKER_BLOCKED).increment(1); + // Workers should just die if the channel is dropped, since that indicates + // the main loop is dead. + channel.send(update).await.unwrap(); } - shared_cache.insert(update.clone(), ()); - match channel.try_send(update) { - Ok(_) => {} - Err(TrySendError::Full(update)) => { - warn!("Worker blocked"); - metrics::counter!(WORKER_BLOCKED).increment(1); - // Workers should just die if the channel is dropped, since that indicates - // the main loop is dead. - channel.send(update).await.unwrap(); - } - Err(e) => { - warn!("Coordinator send failed: {:?}", e); - return; - } + Err(e) => { + warn!("Coordinator send failed: {:?}", e); + return; } } } @@ -153,6 +168,8 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> { cache.clone(), config.update_count_skip_threshold, config.compaction_batch_size, + config.filter_mode.clone(), + config.filtered_teams.clone(), )); } diff --git a/rust/property-defs-rs/src/metrics_consts.rs b/rust/property-defs-rs/src/metrics_consts.rs index e9920c0c6cb1c..d8c3c83a6d17f 100644 --- a/rust/property-defs-rs/src/metrics_consts.rs +++ b/rust/property-defs-rs/src/metrics_consts.rs @@ -20,3 +20,4 @@ pub const UPDATE_TRANSACTION_TIME: &str = "prop_defs_update_transaction_time_ms" pub const GROUP_TYPE_RESOLVE_TIME: &str = "prop_defs_group_type_resolve_time_ms"; pub const UPDATES_SKIPPED: &str = "prop_defs_skipped_updates"; pub const GROUP_TYPE_READS: &str = "prop_defs_group_type_reads"; +pub const SKIPPED_DUE_TO_TEAM_FILTER: &str = "prop_defs_skipped_due_to_team_filter";
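The new team filter in `property-defs-rs` is driven entirely by the two `FromStr` impls in `config.rs`, which Envconfig should read from the `FILTERED_TEAMS` and `FILTER_MODE` environment variables (derived from the field names). A small usage sketch of the parsing and gating semantics, with made-up team ids:

```rust
use property_defs_rs::config::{TeamFilterMode, TeamList};

fn main() {
    // Envconfig populates these via FromStr, e.g. FILTERED_TEAMS="2,7,11" FILTER_MODE="opt_in".
    let teams: TeamList = "2,7,11".parse().expect("comma-separated team ids");
    let mode: TeamFilterMode = "opt_in".parse().expect("opt_in or opt_out");

    // opt_in: only listed teams generate property-definition updates...
    assert!(mode.should_process(&teams.teams, 7));
    assert!(!mode.should_process(&teams.teams, 3));

    // ...while opt_out inverts the check, processing everyone except the list.
    let mode: TeamFilterMode = "opt_out".parse().expect("valid mode");
    assert!(mode.should_process(&teams.teams, 3));
    assert!(!mode.should_process(&teams.teams, 7));
}
```

Note that the default configuration (an empty `FILTERED_TEAMS` with `opt_in`) processes no teams at all, which matches the stated plan of flipping the default to `opt_out` once rollout completes.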